VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 54763

Last change on this file since 54763 was 53423, checked in by vboxsync, 10 years ago

IEMAllInstructions.cpp.h: evex & xop notes.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 590.4 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 53423 2014-12-02 09:24:58Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2013 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination (Eb,Gb encoding).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: LOCK prefix is invalid (#UD). */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Source is the reg field (with REX.R), destination the r/m field (with REX.B). */
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* Implementations without a locked worker (CMP, TEST) don't write the
           destination, so map it read-only. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Dispatch to the locked worker when a LOCK prefix is present. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination (Ev,Gv encoding).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: LOCK prefix is invalid (#UD). */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit writes zero the upper half of the 64-bit register,
                   except TEST which doesn't write its destination at all. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* NOTE(review): checks pfnLockedU8 rather than the size-specific locked
           worker — presumably locked workers are all-or-nothing across sizes
           for a given pImpl; confirm against the IEMOPBINSIZES tables. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination (Gb,Eb encoding).
 *
 * No LOCK variant: the destination is always a register, so the LOCK prefix
 * is rejected up front.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Direction is reversed vs. the rm_r8 worker: source is r/m, dest is reg. */
        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Memory is only read here, so no mapping/commit dance is needed. */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination (Gv,Ev encoding).
 *
 * No LOCK variant: the destination is always a register, so the LOCK prefix
 * is rejected up front.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit writes zero the upper half of the 64-bit register.
                   Unconditional here (no TEST special case; TEST Gv,Ev is not
                   encoded via this worker). */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Memory is only read here, so no mapping/commit dance is needed. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ working on AL with
 * a byte immediate (AL,Ib encoding).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    /* The immediate was consumed above, so it's baked in as a constant arg. */
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ working on
 * AX/EAX/RAX with a word/dword immediate (rAX,Iz encoding).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* 32-bit writes zero the upper half of RAX, except for TEST
               which doesn't write its destination. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit form takes a 32-bit immediate that is sign-extended. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        /* Expands to the default: label asserting and returning an error. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6. Shared handler for opcodes that always raise \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0. SLDT - store the LDTR selector to a register or
 *  16-bit memory operand. Not valid in real/V86 mode. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: operand size controls how much of the register is
           written (the selector itself is only 16 bits). */
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: always stores exactly 16 bits regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
594
595
/** Opcode 0x0f 0x00 /1. STR - store the task register selector to a register
 *  or 16-bit memory operand. Not valid in real/V86 mode. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: operand size controls how much of the register is
           written (the selector itself is only 16 bits). */
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: always stores exactly 16 bits regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
651
652
/** Opcode 0x0f 0x00 /2. LLDT - load the LDTR from a 16-bit register or memory
 *  selector; the heavy lifting is deferred to the C implementation
 *  (iemCImpl_lldt). Not valid in real/V86 mode. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        /* Privilege check before reading the operand from memory. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
682
683
/** Opcode 0x0f 0x00 /3. LTR - load the task register from a 16-bit register
 *  or memory selector; deferred to the C implementation (iemCImpl_ltr).
 *  Not valid in real/V86 mode. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Privilege check before reading the operand from memory. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
713
714
715/** Opcode 0x0f 0x00 /3. */
716FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
717{
718 IEMOP_HLP_NO_REAL_OR_V86_MODE();
719
720 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
721 {
722 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
723 IEM_MC_BEGIN(2, 0);
724 IEM_MC_ARG(uint16_t, u16Sel, 0);
725 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
726 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
727 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
728 IEM_MC_END();
729 }
730 else
731 {
732 IEM_MC_BEGIN(2, 1);
733 IEM_MC_ARG(uint16_t, u16Sel, 0);
734 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
737 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
738 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
739 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
740 IEM_MC_END();
741 }
742 return VINF_SUCCESS;
743}
744
745
/** Opcode 0x0f 0x00 /4. VERR - verify a segment for reading. */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("verr Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
752
753
754/** Opcode 0x0f 0x00 /5. */
755FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
756{
757 IEMOP_MNEMONIC("verr Ew");
758 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
759}
760
761
/** Opcode 0x0f 0x00. Group 6 dispatcher - routes on the ModR/M reg field. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr,  bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
        /* /6 and /7 are undefined in group 6. */
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        case 7: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

}
780
781
/** Opcode 0x0f 0x01 /0 (memory form). SGDT - store the GDTR to memory;
 *  deferred to the C implementation (iemCImpl_sgdt). */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    /* In 64-bit mode the operand size is forced to 64-bit. */
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
798
799
/** Opcode 0x0f 0x01 /0 (11 mem=001). VMCALL - not implemented; logs the
 *  stub and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
806
807
/** Opcode 0x0f 0x01 /0 (11 mem=010). VMLAUNCH - not implemented; logs the
 *  stub and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
814
815
/** Opcode 0x0f 0x01 /0 (11 mem=011). VMRESUME - not implemented; logs the
 *  stub and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
822
823
/** Opcode 0x0f 0x01 /0 (11 mem=100). VMXOFF - not implemented; logs the
 *  stub and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
830
831
/** Opcode 0x0f 0x01 /1 (memory form). SIDT - store the IDTR to memory;
 *  deferred to the C implementation (iemCImpl_sidt). */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    /* In 64-bit mode the operand size is forced to 64-bit. */
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_sidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
848
849
/** Opcode 0x0f 0x01 /1 (11 mem=000). MONITOR - fully deferred to the C
 *  implementation; only the effective segment is decoded here. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
857
858
/** Opcode 0x0f 0x01 /1 (11 mem=001). MWAIT - fully deferred to the C
 *  implementation. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
866
867
/** Opcode 0x0f 0x01 /2 (memory form). LGDT - load the GDTR from memory;
 *  deferred to the C implementation (iemCImpl_lgdt). */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lgdt");
    /* In 64-bit mode the operand size is forced to 64-bit. */
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
884
885
/** Opcode 0x0f 0x01 /2 (11 mem=000). XGETBV - not implemented; asserts in
 *  debug builds and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
892
893
/** Opcode 0x0f 0x01 /2 (11 mem=001). XSETBV - not implemented; asserts in
 *  debug builds and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
900
901
902/** Opcode 0x0f 0x01 /3. */
903FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
904{
905 IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
906 ? IEMMODE_64BIT
907 : pIemCpu->enmEffOpSize;
908 IEM_MC_BEGIN(3, 1);
909 IEM_MC_ARG(uint8_t, iEffSeg, 0);
910 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
911 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
914 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
915 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
916 IEM_MC_END();
917 return VINF_SUCCESS;
918}
919
920
/* AMD SVM instructions (0x0f 0x01 11/3 mem=0..7) - all unimplemented stubs
   that raise #UD. */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
944
/** Opcode 0x0f 0x01 /4. SMSW - store the machine status word (low bits of
 *  CR0) to a register or 16-bit memory operand. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("smsw");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: operand size selects how much of CR0 is stored. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
998
999
/** Opcode 0x0f 0x01 /6. LMSW - load the machine status word into CR0;
 *  deferred to the C implementation (iemCImpl_lmsw). */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1027
1028
/** Opcode 0x0f 0x01 /7, memory forms only (register forms are dispatched to
 *  swapgs/rdtscp by iemOp_Grp7 before we get here). */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    /* The TLB invalidation itself is done by the C implementation. */
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1041
1042
/** Opcode 0x0f 0x01 /7, register form with r/m=0 (64-bit mode only). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_ONLY_64BIT(); /* swapgs is only valid in 64-bit mode */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1051
1052
/** Opcode 0x0f 0x01 /7, register form with r/m=1. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    /* Stub: not implemented yet; report it so a different execution path can
       be taken for this instruction. */
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1060
1061
/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    /* Group 7 dispatches on the reg field; for several reg values the
       register (mod=3) forms encode entirely different instructions in the
       r/m field. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            /* Memory form: SGDT.  Register forms: VMX operations. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            /* Memory form: SIDT.  Register forms: MONITOR/MWAIT. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            /* Memory form: LGDT.  Register forms: XGETBV/XSETBV. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            /* Memory form: LIDT.  Register forms: AMD SVM instructions
               (all eight r/m values are used, hence the unreachable default). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            /* SMSW - mod handled by the worker. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            /* LMSW - mod handled by the worker. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            /* Memory form: INVLPG.  Register forms: SWAPGS (r/m=0) and
               RDTSCP (r/m=1). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1138
/** Common worker for LAR and LSL (opcodes 0x0f 0x02 and 0x0f 0x03).
 *  @param fIsLar  true for LAR, false for LSL; forwarded to the C impl. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: the selector is read from a 16-bit GPR. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                /* 32-bit and 64-bit operand sizes share the 64-bit worker. */
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: the selector is a 16-bit memory operand. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1240
1241
1242
/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true); /* true = LAR */
}
1249
1250
/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false); /* false = LSL */
}
1257
1258
/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC("syscall");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1266
1267
/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1275
1276
/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC("sysret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1284
1285
1286/** Opcode 0x0f 0x08. */
1287FNIEMOP_STUB(iemOp_invd);
1288
1289
/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /* privileged instruction */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1301
1302
1303/** Opcode 0x0f 0x0b. */
1304FNIEMOP_STUB(iemOp_ud2);
1305
/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_IS_AMD_CPUID_FEATURES_ANY_PRESENT(X86_CPUID_EXT_FEATURE_EDX_LONG_MODE | X86_CPUID_AMD_FEATURE_EDX_3DNOW,
                                               X86_CPUID_AMD_FEATURE_ECX_3DNOWPRF))
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operands are invalid for the prefetch group. */
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Still decode the effective address (may fault), but do no prefetching. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1346
1347
1348/** Opcode 0x0f 0x0e. */
1349FNIEMOP_STUB(iemOp_femms);
1350
1351
1352/** Opcode 0x0f 0x0f 0x0c. */
1353FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);
1354
1355/** Opcode 0x0f 0x0f 0x0d. */
1356FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);
1357
1358/** Opcode 0x0f 0x0f 0x1c. */
1359FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);
1360
1361/** Opcode 0x0f 0x0f 0x1d. */
1362FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);
1363
1364/** Opcode 0x0f 0x0f 0x8a. */
1365FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);
1366
1367/** Opcode 0x0f 0x0f 0x8e. */
1368FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);
1369
1370/** Opcode 0x0f 0x0f 0x90. */
1371FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);
1372
1373/** Opcode 0x0f 0x0f 0x94. */
1374FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);
1375
1376/** Opcode 0x0f 0x0f 0x96. */
1377FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);
1378
1379/** Opcode 0x0f 0x0f 0x97. */
1380FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);
1381
1382/** Opcode 0x0f 0x0f 0x9a. */
1383FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);
1384
1385/** Opcode 0x0f 0x0f 0x9e. */
1386FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);
1387
1388/** Opcode 0x0f 0x0f 0xa0. */
1389FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);
1390
1391/** Opcode 0x0f 0x0f 0xa4. */
1392FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);
1393
1394/** Opcode 0x0f 0x0f 0xa6. */
1395FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);
1396
1397/** Opcode 0x0f 0x0f 0xa7. */
1398FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);
1399
1400/** Opcode 0x0f 0x0f 0xaa. */
1401FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);
1402
1403/** Opcode 0x0f 0x0f 0xae. */
1404FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);
1405
1406/** Opcode 0x0f 0x0f 0xb0. */
1407FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1408
1409/** Opcode 0x0f 0x0f 0xb4. */
1410FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);
1411
1412/** Opcode 0x0f 0x0f 0xb6. */
1413FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1414
1415/** Opcode 0x0f 0x0f 0xb7. */
1416FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);
1417
1418/** Opcode 0x0f 0x0f 0xbb. */
1419FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);
1420
1421/** Opcode 0x0f 0x0f 0xbf. */
1422FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1423
1424
/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /* 3DNow! is AMD-specific: without the CPUID feature bit it is #UD. */
    if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_AMD_FEATURE_EDX_3DNOW))
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    /* NOTE(review): 3DNow! encodes the function byte *after* the ModR/M and
       displacement bytes, while this fetches the very next byte - verify
       against the worker implementations once they are no longer stubs. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1466
1467
1468/** Opcode 0x0f 0x10. */
1469FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
1470/** Opcode 0x0f 0x11. */
1471FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
1472/** Opcode 0x0f 0x12. */
1473FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
1474/** Opcode 0x0f 0x13. */
1475FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq); //NEXT
1476/** Opcode 0x0f 0x14. */
1477FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
1478/** Opcode 0x0f 0x15. */
1479FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
1480/** Opcode 0x0f 0x16. */
1481FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
1482/** Opcode 0x0f 0x17. */
1483FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1484
1485
1486/** Opcode 0x0f 0x18. */
1487FNIEMOP_DEF(iemOp_prefetch_Grp16)
1488{
1489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1490 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1491 {
1492 IEMOP_HLP_NO_LOCK_PREFIX();
1493 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1494 {
1495 case 4: /* Aliased to /0 for the time being according to AMD. */
1496 case 5: /* Aliased to /0 for the time being according to AMD. */
1497 case 6: /* Aliased to /0 for the time being according to AMD. */
1498 case 7: /* Aliased to /0 for the time being according to AMD. */
1499 case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
1500 case 1: IEMOP_MNEMONIC("prefetchT0 m8"); break;
1501 case 2: IEMOP_MNEMONIC("prefetchT1 m8"); break;
1502 case 3: IEMOP_MNEMONIC("prefetchT2 m8"); break;
1503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1504 }
1505
1506 IEM_MC_BEGIN(0, 1);
1507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1509 /* Currently a NOP. */
1510 IEM_MC_ADVANCE_RIP();
1511 IEM_MC_END();
1512 return VINF_SUCCESS;
1513 }
1514
1515 return IEMOP_RAISE_INVALID_OPCODE();
1516}
1517
1518
1519/** Opcode 0x0f 0x19..0x1f. */
1520FNIEMOP_DEF(iemOp_nop_Ev)
1521{
1522 IEMOP_HLP_NO_LOCK_PREFIX();
1523 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1524 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1525 {
1526 IEM_MC_BEGIN(0, 0);
1527 IEM_MC_ADVANCE_RIP();
1528 IEM_MC_END();
1529 }
1530 else
1531 {
1532 IEM_MC_BEGIN(0, 1);
1533 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1535 /* Currently a NOP. */
1536 IEM_MC_ADVANCE_RIP();
1537 IEM_MC_END();
1538 }
1539 return VINF_SUCCESS;
1540}
1541
1542
/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    /* Force the operand size: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_AMD_FEATURE_ECX_CR8L))
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 are accepted; the rest raise #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1573
1574
/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC("mov Rd,Dd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* REX.R would select DR8..DR15, which do not exist -> #UD. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1587
1588
/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    /* Force the operand size: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_AMD_FEATURE_ECX_CR8L))
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 are accepted; the rest raise #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1619
1620
/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC("mov Dd,Rd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would select DR8..DR15, which do not exist -> #UD. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1633
1634
/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC("mov Rd,Td");
    /* Test register moves are obsolete (386/486 era) and raise #UD here. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1642
1643
/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC("mov Td,Rd");
    /* Test register moves are obsolete (386/486 era) and raise #UD here. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1651
1652
1653/** Opcode 0x0f 0x28. */
1654FNIEMOP_STUB(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd);
1655/** Opcode 0x0f 0x29. */
1656FNIEMOP_STUB(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd);
1657/** Opcode 0x0f 0x2a. */
1658FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
1659/** Opcode 0x0f 0x2b. */
1660FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd); //NEXT:XP
1661/** Opcode 0x0f 0x2c. */
1662FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
1663/** Opcode 0x0f 0x2d. */
1664FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
1665/** Opcode 0x0f 0x2e. */
1666FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
1667/** Opcode 0x0f 0x2f. */
1668FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1669
1670
/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* All the work is done by the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
1678
1679
/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* All the work is done by the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
1687
1688
/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* All the work is done by the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1696
1697
/** Opcode 0x0f 0x33. */
1699FNIEMOP_STUB(iemOp_rdpmc);
1700/** Opcode 0x0f 0x34. */
1701FNIEMOP_STUB(iemOp_sysenter);
1702/** Opcode 0x0f 0x35. */
1703FNIEMOP_STUB(iemOp_sysexit);
1704/** Opcode 0x0f 0x37. */
1705FNIEMOP_STUB(iemOp_getsec);
1706/** Opcode 0x0f 0x38. */
1707FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
1708/** Opcode 0x0f 0x3a. */
1709FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1710/** Opcode 0x0f 0x3c (?). */
1711FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
1712
/**
 * Implements a conditional move (CMOVcc).
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * Note that in the 32-bit operand-size case the high half of the destination
 * register is cleared even when the condition is false (per the x86-64
 * zero-extension rule for 32-bit GPR writes).
 *
 * @param a_Cnd The conditional "microcode" operation (an IEM_MC_IF_EFL_*
 *              invocation).
 */
1721#define CMOV_X(a_Cnd) \
1722 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1723 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1724 { \
1725 switch (pIemCpu->enmEffOpSize) \
1726 { \
1727 case IEMMODE_16BIT: \
1728 IEM_MC_BEGIN(0, 1); \
1729 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1730 a_Cnd { \
1731 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1732 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
1733 } IEM_MC_ENDIF(); \
1734 IEM_MC_ADVANCE_RIP(); \
1735 IEM_MC_END(); \
1736 return VINF_SUCCESS; \
1737 \
1738 case IEMMODE_32BIT: \
1739 IEM_MC_BEGIN(0, 1); \
1740 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1741 a_Cnd { \
1742 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1743 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
1744 } IEM_MC_ELSE() { \
1745 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
1746 } IEM_MC_ENDIF(); \
1747 IEM_MC_ADVANCE_RIP(); \
1748 IEM_MC_END(); \
1749 return VINF_SUCCESS; \
1750 \
1751 case IEMMODE_64BIT: \
1752 IEM_MC_BEGIN(0, 1); \
1753 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1754 a_Cnd { \
1755 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1756 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
1757 } IEM_MC_ENDIF(); \
1758 IEM_MC_ADVANCE_RIP(); \
1759 IEM_MC_END(); \
1760 return VINF_SUCCESS; \
1761 \
1762 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1763 } \
1764 } \
1765 else \
1766 { \
1767 switch (pIemCpu->enmEffOpSize) \
1768 { \
1769 case IEMMODE_16BIT: \
1770 IEM_MC_BEGIN(0, 2); \
1771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1772 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1774 IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1775 a_Cnd { \
1776 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
1777 } IEM_MC_ENDIF(); \
1778 IEM_MC_ADVANCE_RIP(); \
1779 IEM_MC_END(); \
1780 return VINF_SUCCESS; \
1781 \
1782 case IEMMODE_32BIT: \
1783 IEM_MC_BEGIN(0, 2); \
1784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1785 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1787 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1788 a_Cnd { \
1789 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
1790 } IEM_MC_ELSE() { \
1791 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
1792 } IEM_MC_ENDIF(); \
1793 IEM_MC_ADVANCE_RIP(); \
1794 IEM_MC_END(); \
1795 return VINF_SUCCESS; \
1796 \
1797 case IEMMODE_64BIT: \
1798 IEM_MC_BEGIN(0, 2); \
1799 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1800 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1801 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1802 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1803 a_Cnd { \
1804 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
1805 } IEM_MC_ENDIF(); \
1806 IEM_MC_ADVANCE_RIP(); \
1807 IEM_MC_END(); \
1808 return VINF_SUCCESS; \
1809 \
1810 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1811 } \
1812 } do {} while (0)
1813
1814
1815
/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF)); /* move when OF=1 */
}
1822
1823
/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF)); /* move when OF=0 */
}
1830
1831
/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)); /* move when CF=1 */
}
1838
1839
/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)); /* move when CF=0 */
}
1846
1847
/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)); /* move when ZF=1 */
}
1854
1855
/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)); /* move when ZF=0 */
}
1862
1863
/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); /* move when CF=1 or ZF=1 */
}
1870
1871
/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); /* move when CF=0 and ZF=0 */
}
1878
1879
/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF)); /* move when SF=1 */
}
1886
1887
/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF)); /* move when SF=0 */
}
1894
1895
/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)); /* move when PF=1 */
}
1902
1903
/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)); /* move when PF=0 */
}
1910
1911
/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF)); /* move when SF!=OF */
}
1918
1919
/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF)); /* move when SF==OF */
}
1926
1927
/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* move when ZF=1 or SF!=OF */
}
1934
1935
/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* move when ZF=0 and SF==OF */
}
1942
1943#undef CMOV_X
1944
1945/** Opcode 0x0f 0x50. */
1946FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
1947/** Opcode 0x0f 0x51. */
1948FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
1949/** Opcode 0x0f 0x52. */
1950FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
1951/** Opcode 0x0f 0x53. */
1952FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
1953/** Opcode 0x0f 0x54. */
1954FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
1955/** Opcode 0x0f 0x55. */
1956FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
1957/** Opcode 0x0f 0x56. */
1958FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
1959/** Opcode 0x0f 0x57. */
1960FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
1961/** Opcode 0x0f 0x58. */
1962FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
1963/** Opcode 0x0f 0x59. */
1964FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
1965/** Opcode 0x0f 0x5a. */
1966FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
1967/** Opcode 0x0f 0x5b. */
1968FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
1969/** Opcode 0x0f 0x5c. */
1970FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
1971/** Opcode 0x0f 0x5d. */
1972FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
1973/** Opcode 0x0f 0x5e. */
1974FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
1975/** Opcode 0x0f 0x5f. */
1976FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
1977
1978
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit memory
 * access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Dispatch on the mandatory prefix: 0x66 selects the SSE form, no prefix
       the MMX form; F2/F3 combinations are invalid. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* 64-bit read with 128-bit alignment requirement. */
                IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint32_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint32_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                /* Plain 32-bit read for the MMX form. */
                IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2085
2086
/** Opcode 0x0f 0x60. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    /* punpcklbw: both the MMX and SSE2 forms dispatch to the common
       low-half-to-full worker with the punpcklbw implementation table. */
    IEMOP_MNEMONIC("punpcklbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2093
2094
/** Opcode 0x0f 0x61. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    /* punpcklwd: dispatches both forms to the common low-half worker. */
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2101
2102
/** Opcode 0x0f 0x62. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    /* punpckldq: dispatches both forms to the common low-half worker. */
    IEMOP_MNEMONIC("punpckldq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2109
2110
/* Opcodes 0x0f 0x63..0x67 (pack/compare-greater-than) are stubs - not implemented yet. */
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2121
2122
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 *
 * @param   pImpl   Implementation table; pfnU128 handles the SSE form, the
 *                  optional pfnU64 the MMX form (NULL => invalid opcode).
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Operand-size prefix (0x66) selects the SSE form, no prefix the MMX form;
       any F2/F3 prefix makes the encoding invalid (default case below). */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            /* Instructions without an MMX form (e.g. punpckhqdq) leave pfnU64 NULL. */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                /* MMX form: plain 64-bit load, no alignment restriction here. */
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2229
2230
/** Opcode 0x0f 0x68. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    /* punpckhbw: dispatches both forms to the common high-half worker. */
    IEMOP_MNEMONIC("punpckhbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2237
2238
/** Opcode 0x0f 0x69. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    /* punpckhwd: dispatches both forms to the common high-half worker. */
    IEMOP_MNEMONIC("punpckhwd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2245
2246
/** Opcode 0x0f 0x6a. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    /* punpckhdq: dispatches both forms to the common high-half worker. */
    IEMOP_MNEMONIC("punpckhdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2253
/** Opcode 0x0f 0x6b. (Note: the SSE half of the identifier has a historical
 *  typo, "packssdq" instead of "packssdw"; renaming would require touching
 *  the opcode table as well.) */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2256
2257
/** Opcode 0x0f 0x6c. */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    /* punpcklqdq is SSE2 only; the implementation table has no MMX (U64)
       worker, so the common worker raises #UD for the prefix-less form. */
    IEMOP_MNEMONIC("punpcklqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
2264
2265
/** Opcode 0x0f 0x6d. */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    /* punpckhqdq is SSE2 only; the implementation table has no MMX (U64)
       worker, so the common worker raises #UD for the prefix-less form. */
    IEMOP_MNEMONIC("punpckhqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2272
2273
/** Opcode 0x0f 0x6e.
 *
 * movd/movq Pd/q,Ed/q (MMX) and movd/movq Vd/q,Ed/q (SSE2): load a dword or
 * (with REX.W) qword from a general register or memory into an MMX/XMM
 * register.  The XMM destination is zero-extended to 128 bits, the MMX
 * destination to 64 bits in the dword case.
 */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* 0x66 prefix => SSE (XMM) form, no prefix => MMX form, F2/F3 => #UD. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg*/
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* REX.W: full 64-bit source. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* NOTE(review): the 3rd argument is presumably the number of
                   immediate bytes still to be decoded; this instruction has no
                   immediate, yet 1 is passed here (siblings pass 0) - verify
                   against the IEM_MC_CALC_RM_EFF_ADDR definition. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                else
                    /* Dword source is zero-extended to 64 bits. */
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                /* NOTE(review): same 3rd-argument oddity as in the SSE path above. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2374
2375
/** Opcode 0x0f 0x6f.
 *
 * movq Pq,Qq (MMX) / movdqa Vdq,Wdq (66 prefix) / movdqu Vdq,Wdq (F3 prefix):
 * register/memory to register moves; only the movdqa memory source enforces
 * 128-bit alignment.
 */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned forms share the code below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* Only movdqa (fAligned) enforces 128-bit alignment of the source. */
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2469
2470
/** Opcode 0x0f 0x70. The immediate here is evil!
 *
 * pshufw Pq,Qq,Ib (MMX ext) / pshufd (66) / pshuflw (F2) / pshufhw (F3)
 * Vdq,Wdq,Ib.  "Evil" because the shuffle-control immediate follows the
 * ModR/M addressing bytes and must be fetched in the middle of decoding.
 */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* All three SSE variants share the decode; only the worker differs. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                /* NOTE(review): the 3rd argument is presumably the immediate byte
                   count still to be fetched (the bEvil byte below), yet 0 is
                   passed - verify against the IEM_MC_CALC_RM_EFF_ADDR definition. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                /* pshufw requires SSE or the AMD MMX extensions, not plain MMX. */
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                /* NOTE(review): same 3rd-argument question as in the SSE path above. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2595
2596
/* Group 12 (0x0f 0x71) immediate word-shift workers - MMX (Nq) and SSE
   (0x66 prefix, Udq) forms, register operand only; all stubbed. */
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2614
2615
/** Opcode 0x0f 0x71.
 *
 * Group 12 dispatcher (psrlw/psraw/psllw xmm/mm,Ib): selects the worker by the
 * ModR/M reg field and the mandatory prefix.  Only register operands are
 * valid; a memory encoding raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* /2: psrlw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4: /* /4: psraw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* /6: psllw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2650
2651
/* Group 13 (0x0f 0x72) immediate dword-shift workers - MMX (Nq) and SSE
   (0x66 prefix, Udq) forms, register operand only; all stubbed. */
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2669
2670
/** Opcode 0x0f 0x72.
 *
 * Group 13 dispatcher (psrld/psrad/pslld xmm/mm,Ib): selects the worker by the
 * ModR/M reg field and the mandatory prefix.  Only register operands are
 * valid; a memory encoding raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* /2: psrld */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4: /* /4: psrad */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* /6: pslld */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2705
2706
/* Group 14 (0x0f 0x73) immediate qword/dqword-shift workers - MMX (Nq) and
   SSE (0x66 prefix, Udq) forms; psrldq/pslldq exist only in the SSE form.
   All stubbed. */
/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
2724
2725
/** Opcode 0x0f 0x73.
 *
 * Group 14 dispatcher (psrlq/psrldq/psllq/pslldq xmm/mm,Ib): selects the
 * worker by the ModR/M reg field and the mandatory prefix.  /3 and /7 only
 * exist with the 0x66 prefix.  Only register operands are valid; a memory
 * encoding raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* /2: psrlq */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 3: /* /3: psrldq, SSE only */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* /6: psllq */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7: /* /7: pslldq, SSE only */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2765
2766
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxx    mm1, mm2/mem64
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 and MMX cpuid checks.
 *
 * @param   pImpl   Implementation table; pfnU128 handles the SSE2 form,
 *                  pfnU64 the MMX form.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Operand-size prefix (0x66) selects the SSE2 form, no prefix the MMX
       form; any F2/F3 prefix makes the encoding invalid (default case). */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* 128-bit aligned load (exception type 4). */
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                /* MMX form: plain 64-bit load, no alignment restriction here. */
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2868
2869
/** Opcode 0x0f 0x74. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    /* pcmpeqb: dispatches both forms to the common full-width worker. */
    IEMOP_MNEMONIC("pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
2876
2877
/** Opcode 0x0f 0x75. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    /* pcmpeqw: dispatches both forms to the common full-width worker. */
    IEMOP_MNEMONIC("pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
2884
2885
/** Opcode 0x0f 0x76.
 *  (Note: the identifier has a historical typo, "pcmped" for "pcmpeqd";
 *  renaming would require touching the opcode table as well.) */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    /* pcmpeqd: dispatches both forms to the common full-width worker. */
    IEMOP_MNEMONIC("pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
2892
2893
/** Opcode 0x0f 0x77. */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78. */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79. */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d. */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
/* 0x77/0x7c/0x7d are unimplemented stubs; 0x78/0x79 use the UD stub variant. */
2904
2905
/** Opcode 0x0f 0x7e.
 *
 * movd/movq Ed/q,Pd/q (MMX) and movd/movq Ed/q,Vd/q (SSE2): store a dword or
 * (with REX.W) qword from an MMX/XMM register to a general register or
 * memory.  (The F3-prefixed movq Vq,Wq form is not handled here and hits the
 * default \#UD case.)
 */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* 0x66 prefix => SSE (XMM) source, no prefix => MMX source. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* REX.W: store the low qword of the XMM register. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* NOTE(review): 3rd argument is presumably the remaining immediate
                   byte count; this instruction has no immediate yet passes 1 -
                   verify against the IEM_MC_CALC_RM_EFF_ADDR definition. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                /* NOTE(review): same 3rd-argument oddity as in the SSE path above. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3012
3013
/**
 * Opcode 0x0f 0x7f - dispatches on mandatory prefix:
 *      66h:  movdqa Wdq,Vdq (SSE, aligned store)
 *      F3h:  movdqu Wdq,Vdq (SSE, unaligned store)
 *      none: movq   Qq,Pq   (MMX)
 * Any other prefix combination raises \#UD.
 */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned variants share the decode, only the store differs. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3108
3109
3110
/**
 * Opcode 0x0f 0x80 - jo Jv.
 * Jump near (rel16/rel32) if the overflow flag (OF) is set.
 */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and (forced in long mode) 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3144
3145
/**
 * Opcode 0x0f 0x81 - jno Jv.
 * Jump near if the overflow flag (OF) is clear; the taken branch is the
 * IEM_MC_ELSE arm (inverted test).
 */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3179
3180
/**
 * Opcode 0x0f 0x82 - jc/jb/jnae Jv.
 * Jump near if the carry flag (CF) is set.
 */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3214
3215
/**
 * Opcode 0x0f 0x83 - jnc/jnb/jae Jv.
 * Jump near if the carry flag (CF) is clear (inverted test, jump in ELSE arm).
 */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3249
3250
/**
 * Opcode 0x0f 0x84 - je/jz Jv.
 * Jump near if the zero flag (ZF) is set.
 */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3284
3285
/**
 * Opcode 0x0f 0x85 - jne/jnz Jv.
 * Jump near if the zero flag (ZF) is clear (inverted test, jump in ELSE arm).
 */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3319
3320
/**
 * Opcode 0x0f 0x86 - jbe/jna Jv.
 * Jump near if below-or-equal: CF=1 or ZF=1.
 */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3354
3355
/**
 * Opcode 0x0f 0x87 - jnbe/ja Jv.
 * Jump near if above: CF=0 and ZF=0 (inverted test, jump in ELSE arm).
 */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3389
3390
/**
 * Opcode 0x0f 0x88 - js Jv.
 * Jump near if the sign flag (SF) is set.
 */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3424
3425
/**
 * Opcode 0x0f 0x89 - jns Jv.
 * Jump near if the sign flag (SF) is clear (inverted test, jump in ELSE arm).
 */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3459
3460
/**
 * Opcode 0x0f 0x8a - jp/jpe Jv.
 * Jump near if the parity flag (PF) is set.
 */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3494
3495
3496/** Opcode 0x0f 0x8b. */
3497FNIEMOP_DEF(iemOp_jnp_Jv)
3498{
3499 IEMOP_MNEMONIC("jo Jv");
3500 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3501 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3502 {
3503 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3504 IEMOP_HLP_NO_LOCK_PREFIX();
3505
3506 IEM_MC_BEGIN(0, 0);
3507 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3508 IEM_MC_ADVANCE_RIP();
3509 } IEM_MC_ELSE() {
3510 IEM_MC_REL_JMP_S16(i16Imm);
3511 } IEM_MC_ENDIF();
3512 IEM_MC_END();
3513 }
3514 else
3515 {
3516 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3517 IEMOP_HLP_NO_LOCK_PREFIX();
3518
3519 IEM_MC_BEGIN(0, 0);
3520 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3521 IEM_MC_ADVANCE_RIP();
3522 } IEM_MC_ELSE() {
3523 IEM_MC_REL_JMP_S32(i32Imm);
3524 } IEM_MC_ENDIF();
3525 IEM_MC_END();
3526 }
3527 return VINF_SUCCESS;
3528}
3529
3530
/**
 * Opcode 0x0f 0x8c - jl/jnge Jv.
 * Jump near if less (signed): SF != OF.
 */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3564
3565
/**
 * Opcode 0x0f 0x8d - jnl/jge Jv.
 * Jump near if greater-or-equal (signed): SF == OF (inverted test, jump in ELSE arm).
 */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3599
3600
/**
 * Opcode 0x0f 0x8e - jle/jng Jv.
 * Jump near if less-or-equal (signed): ZF=1 or SF != OF.
 */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3634
3635
/**
 * Opcode 0x0f 0x8f - jnle/jg Jv.
 * Jump near if greater (signed): ZF=0 and SF == OF (inverted test, jump in ELSE arm).
 */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3669
3670
/**
 * Opcode 0x0f 0x90 - seto Eb.
 * Set the byte destination (register or memory) to 1 if OF=1, else 0.
 */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3709
3710
/**
 * Opcode 0x0f 0x91 - setno Eb.
 * Set the byte destination to 1 if OF=0, else 0 (inverted stores).
 */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3749
3750
/**
 * Opcode 0x0f 0x92 - setc/setb/setnae Eb.
 * Set the byte destination to 1 if CF=1, else 0.
 */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3789
3790
/**
 * Opcode 0x0f 0x93 - setnc/setnb/setae Eb.
 * Set the byte destination to 1 if CF=0, else 0 (inverted stores).
 */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3829
3830
/**
 * Opcode 0x0f 0x94 - sete/setz Eb.
 * Set the byte destination to 1 if ZF=1, else 0.
 */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3869
3870
/**
 * Opcode 0x0f 0x95 - setne/setnz Eb.
 * Set the byte destination to 1 if ZF=0, else 0 (inverted stores).
 */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3909
3910
/**
 * Opcode 0x0f 0x96 - setbe/setna Eb.
 * Set the byte destination to 1 if CF=1 or ZF=1, else 0.
 */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3949
3950
/**
 * Opcode 0x0f 0x97 - setnbe/seta Eb.
 * Set the byte destination to 1 if CF=0 and ZF=0, else 0 (inverted stores).
 */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC("setnbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3989
3990
/**
 * Opcode 0x0f 0x98 - sets Eb.
 * Set the byte destination to 1 if SF=1, else 0.
 */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC("sets Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4029
4030
/**
 * Opcode 0x0f 0x99 - setns Eb.
 * Set the byte destination to 1 if SF=0, else 0 (inverted stores).
 */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC("setns Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4069
4070
4071/** Opcode 0x0f 0x9a. */
4072FNIEMOP_DEF(iemOp_setp_Eb)
4073{
4074 IEMOP_MNEMONIC("setnp Eb");
4075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4076 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4077
4078 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4079 * any way. AMD says it's "unused", whatever that means. We're
4080 * ignoring for now. */
4081 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4082 {
4083 /* register target */
4084 IEM_MC_BEGIN(0, 0);
4085 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4086 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4087 } IEM_MC_ELSE() {
4088 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4089 } IEM_MC_ENDIF();
4090 IEM_MC_ADVANCE_RIP();
4091 IEM_MC_END();
4092 }
4093 else
4094 {
4095 /* memory target */
4096 IEM_MC_BEGIN(0, 1);
4097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4098 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4099 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4100 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4101 } IEM_MC_ELSE() {
4102 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4103 } IEM_MC_ENDIF();
4104 IEM_MC_ADVANCE_RIP();
4105 IEM_MC_END();
4106 }
4107 return VINF_SUCCESS;
4108}
4109
4110
/** Opcode 0x0f 0x9b - SETNP: store 1 into the byte destination when PF is
 *  clear, 0 when it is set. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC("setnp Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4149
4150
/** Opcode 0x0f 0x9c - SETL: store 1 into the byte destination when SF != OF
 *  (signed less), 0 otherwise. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC("setl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4189
4190
/** Opcode 0x0f 0x9d - SETNL/SETGE: store 1 into the byte destination when
 *  SF == OF (signed greater-or-equal), 0 otherwise. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC("setnl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4229
4230
/** Opcode 0x0f 0x9e - SETLE: store 1 into the byte destination when ZF is set
 *  or SF != OF (signed less-or-equal), 0 otherwise. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC("setle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4269
4270
/** Opcode 0x0f 0x9f - SETNLE/SETG: store 1 into the byte destination when ZF
 *  is clear and SF == OF (signed greater), 0 otherwise.  Same predicate as
 *  SETLE with the store values swapped. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC("setnle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4309
4310
/**
 * Common 'push segment-register' helper.
 *
 * Pushes the 16-bit selector value of @a iReg, zero extended to the effective
 * operand size for 32/64-bit pushes.  Segment registers below FS (ES/CS/SS/DS)
 * are invalid in 64-bit mode (IEMOP_HLP_NO_64BIT); FS/GS pushes default to a
 * 64-bit operand size in long mode (IEMOP_HLP_DEFAULT_64BIT_OP_SIZE).
 *
 * @param   iReg    The segment register to push (X86_SREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            /* NOTE(review): uses the dedicated SREG push worker rather than
               IEM_MC_PUSH_U32 - presumably to model CPUs that only write the
               low 16 bits of the stack slot; confirm against the worker. */
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
4353
4354
/** Opcode 0x0f 0xa0 - PUSH FS.  (The lock-prefix check here is redundant with
 *  the one in iemOpCommonPushSReg, but harmless.) */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4362
4363
/** Opcode 0x0f 0xa1 - POP FS.  Deferred to the C implementation since loading
 *  a segment register involves descriptor table access and fault checks. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4371
4372
/** Opcode 0x0f 0xa2 - CPUID.  Entirely handled by the C implementation. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4380
4381
4382/**
4383 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4384 * iemOp_bts_Ev_Gv.
4385 */
4386FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4387{
4388 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4389 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4390
4391 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4392 {
4393 /* register destination. */
4394 IEMOP_HLP_NO_LOCK_PREFIX();
4395 switch (pIemCpu->enmEffOpSize)
4396 {
4397 case IEMMODE_16BIT:
4398 IEM_MC_BEGIN(3, 0);
4399 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4400 IEM_MC_ARG(uint16_t, u16Src, 1);
4401 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4402
4403 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4404 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4405 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4406 IEM_MC_REF_EFLAGS(pEFlags);
4407 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4408
4409 IEM_MC_ADVANCE_RIP();
4410 IEM_MC_END();
4411 return VINF_SUCCESS;
4412
4413 case IEMMODE_32BIT:
4414 IEM_MC_BEGIN(3, 0);
4415 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4416 IEM_MC_ARG(uint32_t, u32Src, 1);
4417 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4418
4419 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4420 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4421 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4422 IEM_MC_REF_EFLAGS(pEFlags);
4423 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4424
4425 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4426 IEM_MC_ADVANCE_RIP();
4427 IEM_MC_END();
4428 return VINF_SUCCESS;
4429
4430 case IEMMODE_64BIT:
4431 IEM_MC_BEGIN(3, 0);
4432 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4433 IEM_MC_ARG(uint64_t, u64Src, 1);
4434 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4435
4436 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4437 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4438 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4439 IEM_MC_REF_EFLAGS(pEFlags);
4440 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4441
4442 IEM_MC_ADVANCE_RIP();
4443 IEM_MC_END();
4444 return VINF_SUCCESS;
4445
4446 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4447 }
4448 }
4449 else
4450 {
4451 /* memory destination. */
4452
4453 uint32_t fAccess;
4454 if (pImpl->pfnLockedU16)
4455 fAccess = IEM_ACCESS_DATA_RW;
4456 else /* BT */
4457 {
4458 IEMOP_HLP_NO_LOCK_PREFIX();
4459 fAccess = IEM_ACCESS_DATA_R;
4460 }
4461
4462 NOREF(fAccess);
4463
4464 /** @todo test negative bit offsets! */
4465 switch (pIemCpu->enmEffOpSize)
4466 {
4467 case IEMMODE_16BIT:
4468 IEM_MC_BEGIN(3, 2);
4469 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4470 IEM_MC_ARG(uint16_t, u16Src, 1);
4471 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4473 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4474
4475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4476 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4477 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4478 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4479 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4480 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4481 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4482 IEM_MC_FETCH_EFLAGS(EFlags);
4483
4484 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4485 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4486 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4487 else
4488 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4489 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4490
4491 IEM_MC_COMMIT_EFLAGS(EFlags);
4492 IEM_MC_ADVANCE_RIP();
4493 IEM_MC_END();
4494 return VINF_SUCCESS;
4495
4496 case IEMMODE_32BIT:
4497 IEM_MC_BEGIN(3, 2);
4498 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4499 IEM_MC_ARG(uint32_t, u32Src, 1);
4500 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4501 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4502 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4503
4504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4505 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4506 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4507 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4508 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4509 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4510 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4511 IEM_MC_FETCH_EFLAGS(EFlags);
4512
4513 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4514 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4515 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4516 else
4517 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4518 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4519
4520 IEM_MC_COMMIT_EFLAGS(EFlags);
4521 IEM_MC_ADVANCE_RIP();
4522 IEM_MC_END();
4523 return VINF_SUCCESS;
4524
4525 case IEMMODE_64BIT:
4526 IEM_MC_BEGIN(3, 2);
4527 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4528 IEM_MC_ARG(uint64_t, u64Src, 1);
4529 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4531 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4532
4533 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4534 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4535 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4536 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4537 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4538 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4539 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4540 IEM_MC_FETCH_EFLAGS(EFlags);
4541
4542 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4543 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4544 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4545 else
4546 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4547 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4548
4549 IEM_MC_COMMIT_EFLAGS(EFlags);
4550 IEM_MC_ADVANCE_RIP();
4551 IEM_MC_END();
4552 return VINF_SUCCESS;
4553
4554 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4555 }
4556 }
4557}
4558
4559
4560/** Opcode 0x0f 0xa3. */
4561FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4562{
4563 IEMOP_MNEMONIC("bt Gv,Gv");
4564 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4565}
4566
4567
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Double-precision shift with an immediate shift count.  The register form
 * reads the imm8 right after ModR/M; the memory form tells
 * IEM_MC_CALC_RM_EFF_ADDR that one immediate byte follows (cbImm = 1) and
 * fetches it after the effective address calculation.
 *
 * @param   pImpl   Pointer to the shld/shrd implementation table (assembly).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half in long mode */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* cbImm=1: imm8 follows the ModR/M bytes */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4712
4713
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Same as iemOpCommonShldShrd_Ib except the shift count is read from CL at
 * execution time instead of from an immediate byte.
 *
 * @param   pImpl   Pointer to the shld/shrd implementation table (assembly).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half in long mode */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* no immediate follows, unlike the Ib form */
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4857
4858
4859
/** Opcode 0x0f 0xa4 - SHLD Ev,Gv,Ib. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
4866
4867
/** Opcode 0x0f 0xa5 - SHLD Ev,Gv,CL.  (Comment previously said 0xa7; SHLD
 *  with a CL count is encoded as 0x0f 0xa5.) */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
4874
4875
/** Opcode 0x0f 0xa8 - PUSH GS. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
4883
4884
/** Opcode 0x0f 0xa9 - POP GS.  Deferred to the C implementation (segment
 *  register load with descriptor checks). */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
4892
4893
/** Opcode 0x0f 0xaa - RSM (resume from SMM); not implemented yet. */
FNIEMOP_STUB(iemOp_rsm);
4896
4897
/** Opcode 0x0f 0xab - BTS Ev,Gv. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC("bts Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
4904
4905
/** Opcode 0x0f 0xac - SHRD Ev,Gv,Ib. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
4912
4913
/** Opcode 0x0f 0xad - SHRD Ev,Gv,CL. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
4920
4921
/** Opcode 0x0f 0xae mem/0 - FXSAVE m512.  Raises \#UD unless the guest CPUID
 *  reports FXSR; the heavy lifting is done by iemCImpl_fxsave. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxsave m512");
    if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_FXSR))
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
4940
4941
/** Opcode 0x0f 0xae mem/1 - FXRSTOR m512.  Raises \#UD unless the guest CPUID
 *  reports FXSR; the heavy lifting is done by iemCImpl_fxrstor. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxrstor m512");
    if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_FXSR))
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
4960
4961
/** Opcode 0x0f 0xae mem/2 - LDMXCSR; not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3 - STMXCSR; not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4 - XSAVE; decodes as \#UD stub. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5 - XRSTOR; decodes as \#UD stub. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6 - XSAVEOPT; decodes as \#UD stub. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7 - CLFLUSH; not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
4979
4980
/** Opcode 0x0f 0xae 11b/5 - LFENCE.  \#UD unless guest CPUID has SSE2; uses
 *  the real lfence on capable hosts, otherwise a generic memory fence. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_SSE2))
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX_ON_HOST(X86_CPUID_FEATURE_EDX_SSE2))
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4998
4999
/** Opcode 0x0f 0xae 11b/6 - MFENCE.  \#UD unless guest CPUID has SSE2; uses
 *  the real mfence on capable hosts, otherwise a generic memory fence. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("mfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_SSE2))
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX_ON_HOST(X86_CPUID_FEATURE_EDX_SSE2))
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5017
5018
/** Opcode 0x0f 0xae 11b/7 - SFENCE.  \#UD unless guest CPUID has SSE2; uses
 *  the real sfence on capable hosts, otherwise a generic memory fence.
 *  NOTE(review): SFENCE is architecturally an SSE (not SSE2) instruction;
 *  gating on SSE2 matches the sibling fences here - confirm intent. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_SSE2))
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX_ON_HOST(X86_CPUID_FEATURE_EDX_SSE2))
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5036
5037
/** Opcode 0xf3 0x0f 0xae 11b/0 - RDFSBASE; decodes as \#UD stub. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1 - RDGSBASE; decodes as \#UD stub. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2 - WRFSBASE; decodes as \#UD stub. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3 - WRGSBASE; decodes as \#UD stub. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5049
5050
/** Opcode 0x0f 0xae - Group 15 dispatcher.
 *
 *  Memory forms dispatch on the ModR/M 'reg' field (fxsave, fxrstor,
 *  ldmxcsr, stmxcsr, xsave, xrstor, xsaveopt, clflush).  Register forms
 *  additionally dispatch on the prefix bytes: no prefix selects the fences
 *  (lfence/mfence/sfence, reg 5-7), F3 selects the fs/gs base instructions
 *  (reg 0-3); everything else is \#UD. */
FNIEMOP_DEF(iemOp_Grp15)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* memory forms */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* register forms - prefix sensitive */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* no relevant prefix: the fences */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */

            case IEM_OP_PRF_REPZ: /* F3 prefix: fs/gs base access */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5109
5110
5111/** Opcode 0x0f 0xaf. */
/* Two-operand IMUL (Gv := Gv * Ev); SF/ZF/AF/PF are declared undefined for
   the verifier and the work is done by the common r(e)g,r/m binary-op helper. */
5112FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5113{
5114    IEMOP_MNEMONIC("imul Gv,Ev");
5115    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5116    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5117}
5118
5119
5120/** Opcode 0x0f 0xb0. */
/* CMPXCHG Eb,Gb - compare AL with the destination byte; the assembly worker
   gets the destination, AL (by reference) and the source register byte, plus
   EFLAGS.  A locked worker variant is used when the lock prefix is present. */
5121FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5122{
5123    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
5124    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5125
5126    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5127    {
        /* Register destination: all operands referenced directly. */
5128        IEMOP_HLP_DONE_DECODING();
5129        IEM_MC_BEGIN(4, 0);
5130        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5131        IEM_MC_ARG(uint8_t *, pu8Al, 1);
5132        IEM_MC_ARG(uint8_t, u8Src, 2);
5133        IEM_MC_ARG(uint32_t *, pEFlags, 3);
5134
5135        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5136        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5137        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5138        IEM_MC_REF_EFLAGS(pEFlags);
5139        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5140            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5141        else
5142            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5143
5144        IEM_MC_ADVANCE_RIP();
5145        IEM_MC_END();
5146    }
5147    else
5148    {
        /* Memory destination: map it r/w, run the worker on a local AL copy,
           then commit the mapping, EFLAGS, and write AL back (the worker may
           have updated the local copy on a failed compare). */
5149        IEM_MC_BEGIN(4, 3);
5150        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5151        IEM_MC_ARG(uint8_t *, pu8Al, 1);
5152        IEM_MC_ARG(uint8_t, u8Src, 2);
5153        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5154        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5155        IEM_MC_LOCAL(uint8_t, u8Al);
5156
5157        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5158        IEMOP_HLP_DONE_DECODING();
5159        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5160        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5161        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5162        IEM_MC_FETCH_EFLAGS(EFlags);
5163        IEM_MC_REF_LOCAL(pu8Al, u8Al);
5164        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5165            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5166        else
5167            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5168
5169        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5170        IEM_MC_COMMIT_EFLAGS(EFlags);
5171        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5172        IEM_MC_ADVANCE_RIP();
5173        IEM_MC_END();
5174    }
5175    return VINF_SUCCESS;
5176}
5177
5178/** Opcode 0x0f 0xb1. */
/* CMPXCHG Ev,Gv - word/dword/qword variant of 0F B0.  Same structure as the
   byte form, expanded per effective operand size.  Note: on 32-bit hosts
   (RT_ARCH_X86) the 64-bit source is passed to the assembly worker by
   reference instead of by value. */
5179FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5180{
5181    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
5182    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5183
5184    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5185    {
        /* Register destination. */
5186        IEMOP_HLP_DONE_DECODING();
5187        switch (pIemCpu->enmEffOpSize)
5188        {
5189            case IEMMODE_16BIT:
5190                IEM_MC_BEGIN(4, 0);
5191                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5192                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5193                IEM_MC_ARG(uint16_t, u16Src, 2);
5194                IEM_MC_ARG(uint32_t *, pEFlags, 3);
5195
5196                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5197                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5198                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5199                IEM_MC_REF_EFLAGS(pEFlags);
5200                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5201                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5202                else
5203                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5204
5205                IEM_MC_ADVANCE_RIP();
5206                IEM_MC_END();
5207                return VINF_SUCCESS;
5208
5209            case IEMMODE_32BIT:
5210                IEM_MC_BEGIN(4, 0);
5211                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5212                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5213                IEM_MC_ARG(uint32_t, u32Src, 2);
5214                IEM_MC_ARG(uint32_t *, pEFlags, 3);
5215
5216                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5217                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5218                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5219                IEM_MC_REF_EFLAGS(pEFlags);
5220                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5221                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5222                else
5223                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5224
                /* 32-bit register writes clear bits 63:32 in 64-bit mode. */
5225                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5226                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5227                IEM_MC_ADVANCE_RIP();
5228                IEM_MC_END();
5229                return VINF_SUCCESS;
5230
5231            case IEMMODE_64BIT:
5232                IEM_MC_BEGIN(4, 0);
5233                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5234                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5235#ifdef RT_ARCH_X86
5236                IEM_MC_ARG(uint64_t *, pu64Src, 2);
5237#else
5238                IEM_MC_ARG(uint64_t, u64Src, 2);
5239#endif
5240                IEM_MC_ARG(uint32_t *, pEFlags, 3);
5241
5242                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5243                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
5244                IEM_MC_REF_EFLAGS(pEFlags);
5245#ifdef RT_ARCH_X86
5246                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5247                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5248                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5249                else
5250                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5251#else
5252                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5253                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5254                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5255                else
5256                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5257#endif
5258
5259                IEM_MC_ADVANCE_RIP();
5260                IEM_MC_END();
5261                return VINF_SUCCESS;
5262
5263            IEM_NOT_REACHED_DEFAULT_CASE_RET();
5264        }
5265    }
5266    else
5267    {
        /* Memory destination: map r/w, worker operates on a local copy of
           the accumulator, which is written back afterwards. */
5268        switch (pIemCpu->enmEffOpSize)
5269        {
5270            case IEMMODE_16BIT:
5271                IEM_MC_BEGIN(4, 3);
5272                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5273                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5274                IEM_MC_ARG(uint16_t, u16Src, 2);
5275                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5276                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5277                IEM_MC_LOCAL(uint16_t, u16Ax);
5278
5279                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5280                IEMOP_HLP_DONE_DECODING();
5281                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5282                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5283                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
5284                IEM_MC_FETCH_EFLAGS(EFlags);
5285                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
5286                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5287                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5288                else
5289                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5290
5291                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5292                IEM_MC_COMMIT_EFLAGS(EFlags);
5293                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
5294                IEM_MC_ADVANCE_RIP();
5295                IEM_MC_END();
5296                return VINF_SUCCESS;
5297
5298            case IEMMODE_32BIT:
5299                IEM_MC_BEGIN(4, 3);
5300                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5301                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5302                IEM_MC_ARG(uint32_t, u32Src, 2);
5303                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5304                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5305                IEM_MC_LOCAL(uint32_t, u32Eax);
5306
5307                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5308                IEMOP_HLP_DONE_DECODING();
5309                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5310                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5311                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
5312                IEM_MC_FETCH_EFLAGS(EFlags);
5313                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
5314                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5315                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5316                else
5317                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5318
5319                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5320                IEM_MC_COMMIT_EFLAGS(EFlags);
5321                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
5322                IEM_MC_ADVANCE_RIP();
5323                IEM_MC_END();
5324                return VINF_SUCCESS;
5325
5326            case IEMMODE_64BIT:
5327                IEM_MC_BEGIN(4, 3);
5328                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5329                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5330#ifdef RT_ARCH_X86
5331                IEM_MC_ARG(uint64_t *, pu64Src, 2);
5332#else
5333                IEM_MC_ARG(uint64_t, u64Src, 2);
5334#endif
5335                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5336                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5337                IEM_MC_LOCAL(uint64_t, u64Rax);
5338
5339                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5340                IEMOP_HLP_DONE_DECODING();
5341                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5342                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
5343                IEM_MC_FETCH_EFLAGS(EFlags);
5344                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
5345#ifdef RT_ARCH_X86
5346                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5347                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5348                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5349                else
5350                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5351#else
5352                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5353                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5354                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5355                else
5356                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5357#endif
5358
5359                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5360                IEM_MC_COMMIT_EFLAGS(EFlags);
5361                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
5362                IEM_MC_ADVANCE_RIP();
5363                IEM_MC_END();
5364                return VINF_SUCCESS;
5365
5366            IEM_NOT_REACHED_DEFAULT_CASE_RET();
5367        }
5368    }
5369}
5370
5371
/**
 * Common worker for LSS/LFS/LGS (and siblings): loads a far pointer
 * (offset + selector) from memory into iSegReg and the general register
 * encoded in bRm's reg field.  Memory operand only - the caller rejects
 * register forms.  The heavy lifting is done by iemCImpl_load_SReg_Greg.
 */
5372FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
5373{
5374    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
5375    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
5376
5377    switch (pIemCpu->enmEffOpSize)
5378    {
5379        case IEMMODE_16BIT:
            /* 16-bit offset followed by a 16-bit selector at +2. */
5380            IEM_MC_BEGIN(5, 1);
5381            IEM_MC_ARG(uint16_t, uSel, 0);
5382            IEM_MC_ARG(uint16_t, offSeg, 1);
5383            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5384            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5385            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
5386            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5387            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5388            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5389            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
5390            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2);
5391            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5392            IEM_MC_END();
5393            return VINF_SUCCESS;
5394
5395        case IEMMODE_32BIT:
            /* 32-bit offset followed by a 16-bit selector at +4. */
5396            IEM_MC_BEGIN(5, 1);
5397            IEM_MC_ARG(uint16_t, uSel, 0);
5398            IEM_MC_ARG(uint32_t, offSeg, 1);
5399            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5400            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5401            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
5402            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5403            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5404            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5405            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
5406            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4);
5407            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5408            IEM_MC_END();
5409            return VINF_SUCCESS;
5410
5411        case IEMMODE_64BIT:
            /* 64-bit offset followed by a 16-bit selector at +8. */
5412            IEM_MC_BEGIN(5, 1);
5413            IEM_MC_ARG(uint16_t, uSel, 0);
5414            IEM_MC_ARG(uint64_t, offSeg, 1);
5415            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5416            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5417            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
5418            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5419            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5420            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5421            if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
5422                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
5423            else
5424                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
5425            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8);
5426            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5427            IEM_MC_END();
5428            return VINF_SUCCESS;
5429
5430        IEM_NOT_REACHED_DEFAULT_CASE_RET();
5431    }
5432}
5433
5434
5435/** Opcode 0x0f 0xb2. */
/* LSS Gv,Mp - far pointer load into SS; register forms are invalid (\#UD). */
5436FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5437{
5438    IEMOP_MNEMONIC("lss Gv,Mp");
5439    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5440    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5441        return IEMOP_RAISE_INVALID_OPCODE();
5442    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5443}
5444
5445
5446/** Opcode 0x0f 0xb3. */
/* BTR Ev,Gv - bit test and reset, via the common bit-op Ev,Gv worker. */
5447FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5448{
5449    IEMOP_MNEMONIC("btr Ev,Gv");
5450    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5451}
5452
5453
5454/** Opcode 0x0f 0xb4. */
/* LFS Gv,Mp - far pointer load into FS; register forms are invalid (\#UD). */
5455FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5456{
5457    IEMOP_MNEMONIC("lfs Gv,Mp");
5458    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5459    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5460        return IEMOP_RAISE_INVALID_OPCODE();
5461    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5462}
5463
5464
5465/** Opcode 0x0f 0xb5. */
/* LGS Gv,Mp - far pointer load into GS; register forms are invalid (\#UD). */
5466FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5467{
5468    IEMOP_MNEMONIC("lgs Gv,Mp");
5469    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5470    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5471        return IEMOP_RAISE_INVALID_OPCODE();
5472    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5473}
5474
5475
5476/** Opcode 0x0f 0xb6. */
/* MOVZX Gv,Eb - zero-extend a byte (register or memory) into a 16/32/64-bit
   general register selected by the effective operand size. */
5477FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
5478{
5479    IEMOP_MNEMONIC("movzx Gv,Eb");
5480
5481    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5482    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5483
5484    /*
5485     * If rm is denoting a register, no more instruction bytes.
5486     */
5487    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5488    {
5489        switch (pIemCpu->enmEffOpSize)
5490        {
5491            case IEMMODE_16BIT:
5492                IEM_MC_BEGIN(0, 1);
5493                IEM_MC_LOCAL(uint16_t, u16Value);
5494                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5495                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
5496                IEM_MC_ADVANCE_RIP();
5497                IEM_MC_END();
5498                return VINF_SUCCESS;
5499
5500            case IEMMODE_32BIT:
5501                IEM_MC_BEGIN(0, 1);
5502                IEM_MC_LOCAL(uint32_t, u32Value);
5503                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5504                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5505                IEM_MC_ADVANCE_RIP();
5506                IEM_MC_END();
5507                return VINF_SUCCESS;
5508
5509            case IEMMODE_64BIT:
5510                IEM_MC_BEGIN(0, 1);
5511                IEM_MC_LOCAL(uint64_t, u64Value);
5512                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5513                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5514                IEM_MC_ADVANCE_RIP();
5515                IEM_MC_END();
5516                return VINF_SUCCESS;
5517
5518            IEM_NOT_REACHED_DEFAULT_CASE_RET();
5519        }
5520    }
5521    else
5522    {
5523        /*
5524         * We're loading a register from memory.
5525         */
5526        switch (pIemCpu->enmEffOpSize)
5527        {
5528            case IEMMODE_16BIT:
5529                IEM_MC_BEGIN(0, 2);
5530                IEM_MC_LOCAL(uint16_t, u16Value);
5531                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5532                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5533                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
5534                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
5535                IEM_MC_ADVANCE_RIP();
5536                IEM_MC_END();
5537                return VINF_SUCCESS;
5538
5539            case IEMMODE_32BIT:
5540                IEM_MC_BEGIN(0, 2);
5541                IEM_MC_LOCAL(uint32_t, u32Value);
5542                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5543                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5544                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
5545                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5546                IEM_MC_ADVANCE_RIP();
5547                IEM_MC_END();
5548                return VINF_SUCCESS;
5549
5550            case IEMMODE_64BIT:
5551                IEM_MC_BEGIN(0, 2);
5552                IEM_MC_LOCAL(uint64_t, u64Value);
5553                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5554                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5555                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
5556                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5557                IEM_MC_ADVANCE_RIP();
5558                IEM_MC_END();
5559                return VINF_SUCCESS;
5560
5561            IEM_NOT_REACHED_DEFAULT_CASE_RET();
5562        }
5563    }
5564}
5565
5566
5567/** Opcode 0x0f 0xb7. */
/* MOVZX Gv,Ew - zero-extend a word into a 32 or 64-bit register; 16-bit and
   32-bit operand sizes share the 32-bit path. */
5568FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
5569{
5570    IEMOP_MNEMONIC("movzx Gv,Ew");
5571
5572    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5573    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5574
5575    /** @todo Not entirely sure how the operand size prefix is handled here,
5576     *        assuming that it will be ignored. Would be nice to have a few
5577     *        test for this. */
5578    /*
5579     * If rm is denoting a register, no more instruction bytes.
5580     */
5581    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5582    {
5583        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
5584        {
5585            IEM_MC_BEGIN(0, 1);
5586            IEM_MC_LOCAL(uint32_t, u32Value);
5587            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5588            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5589            IEM_MC_ADVANCE_RIP();
5590            IEM_MC_END();
5591        }
5592        else
5593        {
5594            IEM_MC_BEGIN(0, 1);
5595            IEM_MC_LOCAL(uint64_t, u64Value);
5596            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5597            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5598            IEM_MC_ADVANCE_RIP();
5599            IEM_MC_END();
5600        }
5601    }
5602    else
5603    {
5604        /*
5605         * We're loading a register from memory.
5606         */
5607        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
5608        {
5609            IEM_MC_BEGIN(0, 2);
5610            IEM_MC_LOCAL(uint32_t, u32Value);
5611            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5612            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5613            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
5614            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5615            IEM_MC_ADVANCE_RIP();
5616            IEM_MC_END();
5617        }
5618        else
5619        {
5620            IEM_MC_BEGIN(0, 2);
5621            IEM_MC_LOCAL(uint64_t, u64Value);
5622            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5623            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5624            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
5625            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5626            IEM_MC_ADVANCE_RIP();
5627            IEM_MC_END();
5628        }
5629    }
5630    return VINF_SUCCESS;
5631}
5632
5633
5634/** Opcode 0x0f 0xb8. */
/* POPCNT (F3 prefixed) / JMPE - not implemented yet, decoder stub. */
5635FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
5636
5637
5638/** Opcode 0x0f 0xb9. */
/* Group 10 (UD1): always raises \#UD; logged to aid debugging of guests
   hitting this encoding. */
5639FNIEMOP_DEF(iemOp_Grp10)
5640{
5641    Log(("iemOp_Grp10 -> #UD\n"));
5642    return IEMOP_RAISE_INVALID_OPCODE();
5643}
5644
5645
5646/** Opcode 0x0f 0xba. */
/* Group 8: BT/BTS/BTR/BTC Ev,Ib selected by the mod r/m reg field.  The bit
   offset immediate is masked to the operand width.  For memory operands BT
   maps read-only (it has no locked worker), the others read-write. */
5647FNIEMOP_DEF(iemOp_Grp8)
5648{
5649    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5650    PCIEMOPBINSIZES pImpl;
5651    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5652    {
5653        case 0: case 1: case 2: case 3:
5654            return IEMOP_RAISE_INVALID_OPCODE();
5655        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC("bt  Ev,Ib"); break;
5656        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
5657        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
5658        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
5659        IEM_NOT_REACHED_DEFAULT_CASE_RET();
5660    }
5661    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5662
5663    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5664    {
5665        /* register destination. */
        /* The imm8 bit offset follows the modrm byte; masked per op size below. */
5666        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
5667        IEMOP_HLP_NO_LOCK_PREFIX();
5668
5669        switch (pIemCpu->enmEffOpSize)
5670        {
5671            case IEMMODE_16BIT:
5672                IEM_MC_BEGIN(3, 0);
5673                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5674                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
5675                IEM_MC_ARG(uint32_t *, pEFlags, 2);
5676
5677                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5678                IEM_MC_REF_EFLAGS(pEFlags);
5679                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5680
5681                IEM_MC_ADVANCE_RIP();
5682                IEM_MC_END();
5683                return VINF_SUCCESS;
5684
5685            case IEMMODE_32BIT:
5686                IEM_MC_BEGIN(3, 0);
5687                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5688                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
5689                IEM_MC_ARG(uint32_t *, pEFlags, 2);
5690
5691                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5692                IEM_MC_REF_EFLAGS(pEFlags);
5693                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5694
5695                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5696                IEM_MC_ADVANCE_RIP();
5697                IEM_MC_END();
5698                return VINF_SUCCESS;
5699
5700            case IEMMODE_64BIT:
5701                IEM_MC_BEGIN(3, 0);
5702                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5703                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
5704                IEM_MC_ARG(uint32_t *, pEFlags, 2);
5705
5706                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5707                IEM_MC_REF_EFLAGS(pEFlags);
5708                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5709
5710                IEM_MC_ADVANCE_RIP();
5711                IEM_MC_END();
5712                return VINF_SUCCESS;
5713
5714            IEM_NOT_REACHED_DEFAULT_CASE_RET();
5715        }
5716    }
5717    else
5718    {
5719        /* memory destination. */
5720
        /* BT has no locked form (pfnLockedU16 is NULL), so it maps read-only
           and rejects the lock prefix; the modifying ops map read-write. */
5721        uint32_t fAccess;
5722        if (pImpl->pfnLockedU16)
5723            fAccess = IEM_ACCESS_DATA_RW;
5724        else /* BT */
5725        {
5726            IEMOP_HLP_NO_LOCK_PREFIX();
5727            fAccess = IEM_ACCESS_DATA_R;
5728        }
5729
5730        /** @todo test negative bit offsets! */
5731        switch (pIemCpu->enmEffOpSize)
5732        {
5733            case IEMMODE_16BIT:
5734                IEM_MC_BEGIN(3, 1);
5735                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5736                IEM_MC_ARG(uint16_t, u16Src, 1);
5737                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5738                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5739
                /* Effective address first (1 byte of imm8 still pending). */
5740                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5741                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
5742                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
5743                IEM_MC_FETCH_EFLAGS(EFlags);
5744                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5745                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5746                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5747                else
5748                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5749                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5750
5751                IEM_MC_COMMIT_EFLAGS(EFlags);
5752                IEM_MC_ADVANCE_RIP();
5753                IEM_MC_END();
5754                return VINF_SUCCESS;
5755
5756            case IEMMODE_32BIT:
5757                IEM_MC_BEGIN(3, 1);
5758                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5759                IEM_MC_ARG(uint32_t, u32Src, 1);
5760                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5761                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5762
5763                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5764                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
5765                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
5766                IEM_MC_FETCH_EFLAGS(EFlags);
5767                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5768                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5769                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5770                else
5771                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5772                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5773
5774                IEM_MC_COMMIT_EFLAGS(EFlags);
5775                IEM_MC_ADVANCE_RIP();
5776                IEM_MC_END();
5777                return VINF_SUCCESS;
5778
5779            case IEMMODE_64BIT:
5780                IEM_MC_BEGIN(3, 1);
5781                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5782                IEM_MC_ARG(uint64_t, u64Src, 1);
5783                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5784                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5785
5786                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5787                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
5788                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
5789                IEM_MC_FETCH_EFLAGS(EFlags);
5790                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5791                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5792                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5793                else
5794                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5795                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5796
5797                IEM_MC_COMMIT_EFLAGS(EFlags);
5798                IEM_MC_ADVANCE_RIP();
5799                IEM_MC_END();
5800                return VINF_SUCCESS;
5801
5802            IEM_NOT_REACHED_DEFAULT_CASE_RET();
5803        }
5804    }
5805
5806}
5807
5808
5809/** Opcode 0x0f 0xbb. */
/* BTC Ev,Gv - bit test and complement, via the common bit-op Ev,Gv worker. */
5810FNIEMOP_DEF(iemOp_btc_Ev_Gv)
5811{
5812    IEMOP_MNEMONIC("btc Ev,Gv");
5813    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
5814}
5815
5816
5817/** Opcode 0x0f 0xbc. */
/* BSF Gv,Ev - bit scan forward; OF/SF/AF/PF/CF declared undefined for the
   verifier, common r(e)g,r/m binary-op helper does the work. */
5818FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
5819{
5820    IEMOP_MNEMONIC("bsf Gv,Ev");
5821    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
5822    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
5823}
5824
5825
5826/** Opcode 0x0f 0xbd. */
/* BSR Gv,Ev - bit scan reverse; same eflags/worker arrangement as BSF. */
5827FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
5828{
5829    IEMOP_MNEMONIC("bsr Gv,Ev");
5830    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
5831    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
5832}
5833
5834
5835/** Opcode 0x0f 0xbe. */
/* MOVSX Gv,Eb - sign-extend a byte (register or memory) into a 16/32/64-bit
   general register selected by the effective operand size. */
5836FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
5837{
5838    IEMOP_MNEMONIC("movsx Gv,Eb");
5839
5840    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5841    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5842
5843    /*
5844     * If rm is denoting a register, no more instruction bytes.
5845     */
5846    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5847    {
5848        switch (pIemCpu->enmEffOpSize)
5849        {
5850            case IEMMODE_16BIT:
5851                IEM_MC_BEGIN(0, 1);
5852                IEM_MC_LOCAL(uint16_t, u16Value);
5853                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5854                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
5855                IEM_MC_ADVANCE_RIP();
5856                IEM_MC_END();
5857                return VINF_SUCCESS;
5858
5859            case IEMMODE_32BIT:
5860                IEM_MC_BEGIN(0, 1);
5861                IEM_MC_LOCAL(uint32_t, u32Value);
5862                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5863                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5864                IEM_MC_ADVANCE_RIP();
5865                IEM_MC_END();
5866                return VINF_SUCCESS;
5867
5868            case IEMMODE_64BIT:
5869                IEM_MC_BEGIN(0, 1);
5870                IEM_MC_LOCAL(uint64_t, u64Value);
5871                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5872                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5873                IEM_MC_ADVANCE_RIP();
5874                IEM_MC_END();
5875                return VINF_SUCCESS;
5876
5877            IEM_NOT_REACHED_DEFAULT_CASE_RET();
5878        }
5879    }
5880    else
5881    {
5882        /*
5883         * We're loading a register from memory.
5884         */
5885        switch (pIemCpu->enmEffOpSize)
5886        {
5887            case IEMMODE_16BIT:
5888                IEM_MC_BEGIN(0, 2);
5889                IEM_MC_LOCAL(uint16_t, u16Value);
5890                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5891                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5892                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
5893                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
5894                IEM_MC_ADVANCE_RIP();
5895                IEM_MC_END();
5896                return VINF_SUCCESS;
5897
5898            case IEMMODE_32BIT:
5899                IEM_MC_BEGIN(0, 2);
5900                IEM_MC_LOCAL(uint32_t, u32Value);
5901                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5902                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5903                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
5904                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5905                IEM_MC_ADVANCE_RIP();
5906                IEM_MC_END();
5907                return VINF_SUCCESS;
5908
5909            case IEMMODE_64BIT:
5910                IEM_MC_BEGIN(0, 2);
5911                IEM_MC_LOCAL(uint64_t, u64Value);
5912                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5913                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5914                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
5915                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5916                IEM_MC_ADVANCE_RIP();
5917                IEM_MC_END();
5918                return VINF_SUCCESS;
5919
5920            IEM_NOT_REACHED_DEFAULT_CASE_RET();
5921        }
5922    }
5923}
5924
5925
5926/** Opcode 0x0f 0xbf. */
/* MOVSX Gv,Ew - sign-extend a word into a 32 or 64-bit register; 16-bit and
   32-bit operand sizes share the 32-bit path. */
5927FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
5928{
5929    IEMOP_MNEMONIC("movsx Gv,Ew");
5930
5931    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5932    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5933
5934    /** @todo Not entirely sure how the operand size prefix is handled here,
5935     *        assuming that it will be ignored. Would be nice to have a few
5936     *        test for this. */
5937    /*
5938     * If rm is denoting a register, no more instruction bytes.
5939     */
5940    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5941    {
5942        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
5943        {
5944            IEM_MC_BEGIN(0, 1);
5945            IEM_MC_LOCAL(uint32_t, u32Value);
5946            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5947            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5948            IEM_MC_ADVANCE_RIP();
5949            IEM_MC_END();
5950        }
5951        else
5952        {
5953            IEM_MC_BEGIN(0, 1);
5954            IEM_MC_LOCAL(uint64_t, u64Value);
5955            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5956            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5957            IEM_MC_ADVANCE_RIP();
5958            IEM_MC_END();
5959        }
5960    }
5961    else
5962    {
5963        /*
5964         * We're loading a register from memory.
5965         */
5966        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
5967        {
5968            IEM_MC_BEGIN(0, 2);
5969            IEM_MC_LOCAL(uint32_t, u32Value);
5970            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5971            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5972            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
5973            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5974            IEM_MC_ADVANCE_RIP();
5975            IEM_MC_END();
5976        }
5977        else
5978        {
5979            IEM_MC_BEGIN(0, 2);
5980            IEM_MC_LOCAL(uint64_t, u64Value);
5981            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5982            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5983            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
5984            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5985            IEM_MC_ADVANCE_RIP();
5986            IEM_MC_END();
5987        }
5988    }
5989    return VINF_SUCCESS;
5990}
5991
5992
/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: LOCK is invalid here. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* The worker exchanges and adds through the two references. */
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Map the destination read-write, run the (possibly locked) worker on
           a copy of the source register, then write the old destination value
           back into the source register. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *,  pu8Dst,          0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,          1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t,  u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
6050
6051
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC("xadd Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: LOCK is invalid here. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit writes clear the high dword of both 64-bit GREGs. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Same pattern for all widths: map destination RW, run the (possibly
           locked) worker on a copy of the source register, then store the old
           destination value into the source register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst,         0);
                IEM_MC_ARG(uint16_t *, pu16Reg,         1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst,         0);
                IEM_MC_ARG(uint32_t *, pu32Reg,         1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst,         0);
                IEM_MC_ARG(uint64_t *, pu64Reg,         1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6203
/** Opcode 0x0f 0xc2 - cmpps/cmppd/cmpss/cmpsd with immediate (stub). */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);

/** Opcode 0x0f 0xc3 - movnti, non-temporal store (stub). */
FNIEMOP_STUB(iemOp_movnti_My_Gy);

/** Opcode 0x0f 0xc4 - pinsrw (stub). */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5 - pextrw (stub). */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6 - shufps/shufpd (stub). */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6218
6219
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst,     0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx,     1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx,     2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Comparand: EDX:EAX assembled from two 32-bit fetches. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Exchange value: ECX:EBX. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On compare failure (ZF clear) write the updated u64EaxEdx back to
       EDX:EAX; on success the registers are left untouched. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6264
6265
/* Group 9 workers not implemented yet; the UD stubs raise invalid opcode. */

/** Opcode REX.W 0x0f 0xc7 !11/1 - cmpxchg16b (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6 - rdrand (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6 - vmptrld (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6 - vmclear (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6 - vmxon (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7 - vmptrst (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6283
6284
6285/** Opcode 0x0f 0xc7. */
6286FNIEMOP_DEF(iemOp_Grp9)
6287{
6288 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6290 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6291 {
6292 case 0: case 2: case 3: case 4: case 5:
6293 return IEMOP_RAISE_INVALID_OPCODE();
6294 case 1:
6295 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6296 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6297 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6298 return IEMOP_RAISE_INVALID_OPCODE();
6299 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6300 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6301 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6302 case 6:
6303 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6304 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6305 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6306 {
6307 case 0:
6308 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6309 case IEM_OP_PRF_SIZE_OP:
6310 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6311 case IEM_OP_PRF_REPZ:
6312 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6313 default:
6314 return IEMOP_RAISE_INVALID_OPCODE();
6315 }
6316 case 7:
6317 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6318 {
6319 case 0:
6320 case IEM_OP_PRF_REPZ:
6321 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6322 default:
6323 return IEMOP_RAISE_INVALID_OPCODE();
6324 }
6325 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6326 }
6327}
6328
6329
/**
 * Common 'bswap register' helper.
 *
 * Byte-swaps the general register given by @a iReg at the current effective
 * operand size.  Used by all the 0x0f 0xc8..0xcf decoders.
 *
 * @param   iReg    The register index, including any REX.B extension.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* 16-bit bswap has its own worker; referenced as a 32-bit GREG
               so the upper half of the dword is preserved. */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);     /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            /* 32-bit writes clear bits 63:32 of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6369
6370
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6380
6381
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC("bswap rCX/r9");
    /* REX.B selects r9 instead of rCX; see iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6388
6389
6390/** Opcode 0x0f 0xca. */
6391FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6392{
6393 IEMOP_MNEMONIC("bswap rDX/r9");
6394 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6395}
6396
6397
6398/** Opcode 0x0f 0xcb. */
6399FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6400{
6401 IEMOP_MNEMONIC("bswap rBX/r9");
6402 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6403}
6404
6405
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC("bswap rSP/r12");
    /* REX.B selects r12 instead of rSP; see iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}
6412
6413
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC("bswap rBP/r13");
    /* REX.B selects r13 instead of rBP; see iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}
6420
6421
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC("bswap rSI/r14");
    /* REX.B selects r14 instead of rSI; see iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}
6428
6429
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC("bswap rDI/r15");
    /* REX.B selects r15 instead of rDI; see iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6436
6437
6438
/* MMX/SSE2 stubs for opcodes 0x0f 0xd0..0xd6. */

/** Opcode 0x0f 0xd0 - addsubpd/addsubps (stub). */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1 - psrlw (stub). */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2 - psrld (stub). */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3 - psrlq (stub). */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4 - paddq (stub). */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5 - pmullw (stub). */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6 - movq/movq2dq/movdq2q (stub). */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6453
6454
6455/** Opcode 0x0f 0xd7. */
6456FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6457{
6458 /* Docs says register only. */
6459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6460 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6461 return IEMOP_RAISE_INVALID_OPCODE();
6462
6463 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6464 /** @todo testcase: Check that the instruction implicitly clears the high
6465 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6466 * and opcode modifications are made to work with the whole width (not
6467 * just 128). */
6468 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6469 {
6470 case IEM_OP_PRF_SIZE_OP: /* SSE */
6471 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6472 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6473 IEM_MC_BEGIN(2, 0);
6474 IEM_MC_ARG(uint64_t *, pDst, 0);
6475 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6476 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6477 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6478 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6479 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6480 IEM_MC_ADVANCE_RIP();
6481 IEM_MC_END();
6482 return VINF_SUCCESS;
6483
6484 case 0: /* MMX */
6485 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6486 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6487 IEM_MC_BEGIN(2, 0);
6488 IEM_MC_ARG(uint64_t *, pDst, 0);
6489 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6490 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6491 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6492 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6493 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6494 IEM_MC_ADVANCE_RIP();
6495 IEM_MC_END();
6496 return VINF_SUCCESS;
6497
6498 default:
6499 return IEMOP_RAISE_INVALID_OPCODE();
6500 }
6501}
6502
6503
/* MMX/SSE2 packed-integer stubs for opcodes 0x0f 0xd8..0xee. */

/** Opcode 0x0f 0xd8 - psubusb (stub). */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9 - psubusw (stub). */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda - pminub (stub). */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb - pand (stub). */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc - paddusb (stub). */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd - paddusw (stub). */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde - pmaxub (stub). */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf - pandn (stub). */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0 - pavgb (stub). */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1 - psraw (stub). */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2 - psrad (stub). */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3 - pavgw (stub). */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4 - pmulhuw (stub). */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5 - pmulhw (stub). */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6 - cvttpd2dq/cvtdq2pd/cvtpd2dq (stub). */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7 - movntq/movntdq (stub). */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8 - psubsb (stub). */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9 - psubsw (stub). */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea - pminsw (stub). */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb - por (stub). */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec - paddsb (stub). */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed - paddsw (stub). */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee - pmaxsw (stub). */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
6550
6551
/** Opcode 0x0f 0xef. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pxor");
    /* Both the MMX and SSE2 encodings go through the common full-register
       worker with the pxor implementation table. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
6558
6559
/* MMX/SSE2 stubs for opcodes 0x0f 0xf0..0xfe. */

/** Opcode 0x0f 0xf0 - lddqu (stub). */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1 - psllw (stub). */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2 - pslld (stub). */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3 - psllq (stub). */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4 - pmuludq (stub). */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5 - pmaddwd (stub). */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6 - psadbw (stub). */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7 - maskmovq/maskmovdqu (stub). */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8 - psubb (stub). */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
/** Opcode 0x0f 0xf9 - psubw (stub). */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa - psubd (stub). */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb - psubq (stub). */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc - paddb (stub). */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd - paddw (stub). */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe - paddd (stub). */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
6590
6591
/** The two byte opcode map (0x0f prefixed), indexed by the second opcode
 *  byte.  Note: the index comment for iemOp_btc_Ev_Gv previously said 0xbd
 *  by mistake; the entry position (0xbb) is and was correct. */
const PFNIEMOP g_apfnTwoByteMap[256] =
{
    /* 0x00 */ iemOp_Grp6,
    /* 0x01 */ iemOp_Grp7,
    /* 0x02 */ iemOp_lar_Gv_Ew,
    /* 0x03 */ iemOp_lsl_Gv_Ew,
    /* 0x04 */ iemOp_Invalid,
    /* 0x05 */ iemOp_syscall,
    /* 0x06 */ iemOp_clts,
    /* 0x07 */ iemOp_sysret,
    /* 0x08 */ iemOp_invd,
    /* 0x09 */ iemOp_wbinvd,
    /* 0x0a */ iemOp_Invalid,
    /* 0x0b */ iemOp_ud2,
    /* 0x0c */ iemOp_Invalid,
    /* 0x0d */ iemOp_nop_Ev_GrpP,
    /* 0x0e */ iemOp_femms,
    /* 0x0f */ iemOp_3Dnow,
    /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
    /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
    /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
    /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
    /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
    /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
    /* 0x18 */ iemOp_prefetch_Grp16,
    /* 0x19 */ iemOp_nop_Ev,
    /* 0x1a */ iemOp_nop_Ev,
    /* 0x1b */ iemOp_nop_Ev,
    /* 0x1c */ iemOp_nop_Ev,
    /* 0x1d */ iemOp_nop_Ev,
    /* 0x1e */ iemOp_nop_Ev,
    /* 0x1f */ iemOp_nop_Ev,
    /* 0x20 */ iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
    /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
    /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
    /* 0x30 */ iemOp_wrmsr,
    /* 0x31 */ iemOp_rdtsc,
    /* 0x32 */ iemOp_rdmsr,
    /* 0x33 */ iemOp_rdpmc,
    /* 0x34 */ iemOp_sysenter,
    /* 0x35 */ iemOp_sysexit,
    /* 0x36 */ iemOp_Invalid,
    /* 0x37 */ iemOp_getsec,
    /* 0x38 */ iemOp_3byte_Esc_A4,
    /* 0x39 */ iemOp_Invalid,
    /* 0x3a */ iemOp_3byte_Esc_A5,
    /* 0x3b */ iemOp_Invalid,
    /* 0x3c */ iemOp_movnti_Gv_Ev/*??*/,
    /* 0x3d */ iemOp_Invalid,
    /* 0x3e */ iemOp_Invalid,
    /* 0x3f */ iemOp_Invalid,
    /* 0x40 */ iemOp_cmovo_Gv_Ev,
    /* 0x41 */ iemOp_cmovno_Gv_Ev,
    /* 0x42 */ iemOp_cmovc_Gv_Ev,
    /* 0x43 */ iemOp_cmovnc_Gv_Ev,
    /* 0x44 */ iemOp_cmove_Gv_Ev,
    /* 0x45 */ iemOp_cmovne_Gv_Ev,
    /* 0x46 */ iemOp_cmovbe_Gv_Ev,
    /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
    /* 0x48 */ iemOp_cmovs_Gv_Ev,
    /* 0x49 */ iemOp_cmovns_Gv_Ev,
    /* 0x4a */ iemOp_cmovp_Gv_Ev,
    /* 0x4b */ iemOp_cmovnp_Gv_Ev,
    /* 0x4c */ iemOp_cmovl_Gv_Ev,
    /* 0x4d */ iemOp_cmovnl_Gv_Ev,
    /* 0x4e */ iemOp_cmovle_Gv_Ev,
    /* 0x4f */ iemOp_cmovnle_Gv_Ev,
    /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
    /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
    /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
    /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
    /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
    /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
    /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
    /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
    /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
    /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
    /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
    /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
    /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
    /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
    /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
    /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
    /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
    /* 0x71 */ iemOp_Grp12,
    /* 0x72 */ iemOp_Grp13,
    /* 0x73 */ iemOp_Grp14,
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
    /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
    /* 0x77 */ iemOp_emms,
    /* 0x78 */ iemOp_vmread_AmdGrp17,
    /* 0x79 */ iemOp_vmwrite,
    /* 0x7a */ iemOp_Invalid,
    /* 0x7b */ iemOp_Invalid,
    /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
    /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
    /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
    /* 0x80 */ iemOp_jo_Jv,
    /* 0x81 */ iemOp_jno_Jv,
    /* 0x82 */ iemOp_jc_Jv,
    /* 0x83 */ iemOp_jnc_Jv,
    /* 0x84 */ iemOp_je_Jv,
    /* 0x85 */ iemOp_jne_Jv,
    /* 0x86 */ iemOp_jbe_Jv,
    /* 0x87 */ iemOp_jnbe_Jv,
    /* 0x88 */ iemOp_js_Jv,
    /* 0x89 */ iemOp_jns_Jv,
    /* 0x8a */ iemOp_jp_Jv,
    /* 0x8b */ iemOp_jnp_Jv,
    /* 0x8c */ iemOp_jl_Jv,
    /* 0x8d */ iemOp_jnl_Jv,
    /* 0x8e */ iemOp_jle_Jv,
    /* 0x8f */ iemOp_jnle_Jv,
    /* 0x90 */ iemOp_seto_Eb,
    /* 0x91 */ iemOp_setno_Eb,
    /* 0x92 */ iemOp_setc_Eb,
    /* 0x93 */ iemOp_setnc_Eb,
    /* 0x94 */ iemOp_sete_Eb,
    /* 0x95 */ iemOp_setne_Eb,
    /* 0x96 */ iemOp_setbe_Eb,
    /* 0x97 */ iemOp_setnbe_Eb,
    /* 0x98 */ iemOp_sets_Eb,
    /* 0x99 */ iemOp_setns_Eb,
    /* 0x9a */ iemOp_setp_Eb,
    /* 0x9b */ iemOp_setnp_Eb,
    /* 0x9c */ iemOp_setl_Eb,
    /* 0x9d */ iemOp_setnl_Eb,
    /* 0x9e */ iemOp_setle_Eb,
    /* 0x9f */ iemOp_setnle_Eb,
    /* 0xa0 */ iemOp_push_fs,
    /* 0xa1 */ iemOp_pop_fs,
    /* 0xa2 */ iemOp_cpuid,
    /* 0xa3 */ iemOp_bt_Ev_Gv,
    /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
    /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
    /* 0xa6 */ iemOp_Invalid,
    /* 0xa7 */ iemOp_Invalid,
    /* 0xa8 */ iemOp_push_gs,
    /* 0xa9 */ iemOp_pop_gs,
    /* 0xaa */ iemOp_rsm,
    /* 0xab */ iemOp_bts_Ev_Gv,
    /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
    /* 0xad */ iemOp_shrd_Ev_Gv_CL,
    /* 0xae */ iemOp_Grp15,
    /* 0xaf */ iemOp_imul_Gv_Ev,
    /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
    /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
    /* 0xb2 */ iemOp_lss_Gv_Mp,
    /* 0xb3 */ iemOp_btr_Ev_Gv,
    /* 0xb4 */ iemOp_lfs_Gv_Mp,
    /* 0xb5 */ iemOp_lgs_Gv_Mp,
    /* 0xb6 */ iemOp_movzx_Gv_Eb,
    /* 0xb7 */ iemOp_movzx_Gv_Ew,
    /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
    /* 0xb9 */ iemOp_Grp10,
    /* 0xba */ iemOp_Grp8,
    /* 0xbb */ iemOp_btc_Ev_Gv,
    /* 0xbc */ iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev,
    /* 0xbe */ iemOp_movsx_Gv_Eb,
    /* 0xbf */ iemOp_movsx_Gv_Ew,
    /* 0xc0 */ iemOp_xadd_Eb_Gb,
    /* 0xc1 */ iemOp_xadd_Ev_Gv,
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy,
    /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
    /* 0xc7 */ iemOp_Grp9,
    /* 0xc8 */ iemOp_bswap_rAX_r8,
    /* 0xc9 */ iemOp_bswap_rCX_r9,
    /* 0xca */ iemOp_bswap_rDX_r10,
    /* 0xcb */ iemOp_bswap_rBX_r11,
    /* 0xcc */ iemOp_bswap_rSP_r12,
    /* 0xcd */ iemOp_bswap_rBP_r13,
    /* 0xce */ iemOp_bswap_rSI_r14,
    /* 0xcf */ iemOp_bswap_rDI_r15,
    /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
    /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
    /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
    /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
    /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
    /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
    /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
    /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
    /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
    /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
    /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
    /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
    /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
    /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
    /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
    /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
    /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
    /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
    /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
    /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
    /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
    /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
    /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
    /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
    /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
    /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
    /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
    /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
    /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
    /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
    /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
    /* 0xff */ iemOp_Invalid
};
6851
6852/** @} */
6853
6854
6855/** @name One byte opcodes.
6856 *
6857 * @{
6858 */
6859
6860/** Opcode 0x00. */
6861FNIEMOP_DEF(iemOp_add_Eb_Gb)
6862{
6863 IEMOP_MNEMONIC("add Eb,Gb");
6864 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
6865}
6866
6867
6868/** Opcode 0x01. */
6869FNIEMOP_DEF(iemOp_add_Ev_Gv)
6870{
6871 IEMOP_MNEMONIC("add Ev,Gv");
6872 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
6873}
6874
6875
6876/** Opcode 0x02. */
6877FNIEMOP_DEF(iemOp_add_Gb_Eb)
6878{
6879 IEMOP_MNEMONIC("add Gb,Eb");
6880 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
6881}
6882
6883
6884/** Opcode 0x03. */
6885FNIEMOP_DEF(iemOp_add_Gv_Ev)
6886{
6887 IEMOP_MNEMONIC("add Gv,Ev");
6888 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
6889}
6890
6891
6892/** Opcode 0x04. */
6893FNIEMOP_DEF(iemOp_add_Al_Ib)
6894{
6895 IEMOP_MNEMONIC("add al,Ib");
6896 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
6897}
6898
6899
6900/** Opcode 0x05. */
6901FNIEMOP_DEF(iemOp_add_eAX_Iz)
6902{
6903 IEMOP_MNEMONIC("add rAX,Iz");
6904 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
6905}
6906
6907
6908/** Opcode 0x06. */
6909FNIEMOP_DEF(iemOp_push_ES)
6910{
6911 IEMOP_MNEMONIC("push es");
6912 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
6913}
6914
6915
6916/** Opcode 0x07. */
6917FNIEMOP_DEF(iemOp_pop_ES)
6918{
6919 IEMOP_MNEMONIC("pop es");
6920 IEMOP_HLP_NO_64BIT();
6921 IEMOP_HLP_NO_LOCK_PREFIX();
6922 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
6923}
6924
6925
6926/** Opcode 0x08. */
6927FNIEMOP_DEF(iemOp_or_Eb_Gb)
6928{
6929 IEMOP_MNEMONIC("or Eb,Gb");
6930 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6931 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
6932}
6933
6934
6935/** Opcode 0x09. */
6936FNIEMOP_DEF(iemOp_or_Ev_Gv)
6937{
6938 IEMOP_MNEMONIC("or Ev,Gv ");
6939 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6940 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
6941}
6942
6943
6944/** Opcode 0x0a. */
6945FNIEMOP_DEF(iemOp_or_Gb_Eb)
6946{
6947 IEMOP_MNEMONIC("or Gb,Eb");
6948 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6949 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
6950}
6951
6952
6953/** Opcode 0x0b. */
6954FNIEMOP_DEF(iemOp_or_Gv_Ev)
6955{
6956 IEMOP_MNEMONIC("or Gv,Ev");
6957 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6958 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
6959}
6960
6961
6962/** Opcode 0x0c. */
6963FNIEMOP_DEF(iemOp_or_Al_Ib)
6964{
6965 IEMOP_MNEMONIC("or al,Ib");
6966 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6967 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
6968}
6969
6970
6971/** Opcode 0x0d. */
6972FNIEMOP_DEF(iemOp_or_eAX_Iz)
6973{
6974 IEMOP_MNEMONIC("or rAX,Iz");
6975 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6976 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
6977}
6978
6979
6980/** Opcode 0x0e. */
6981FNIEMOP_DEF(iemOp_push_CS)
6982{
6983 IEMOP_MNEMONIC("push cs");
6984 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
6985}
6986
6987
6988/** Opcode 0x0f. */
6989FNIEMOP_DEF(iemOp_2byteEscape)
6990{
6991 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
6992 return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
6993}
6994
6995/** Opcode 0x10. */
6996FNIEMOP_DEF(iemOp_adc_Eb_Gb)
6997{
6998 IEMOP_MNEMONIC("adc Eb,Gb");
6999 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
7000}
7001
7002
7003/** Opcode 0x11. */
7004FNIEMOP_DEF(iemOp_adc_Ev_Gv)
7005{
7006 IEMOP_MNEMONIC("adc Ev,Gv");
7007 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
7008}
7009
7010
7011/** Opcode 0x12. */
7012FNIEMOP_DEF(iemOp_adc_Gb_Eb)
7013{
7014 IEMOP_MNEMONIC("adc Gb,Eb");
7015 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
7016}
7017
7018
7019/** Opcode 0x13. */
7020FNIEMOP_DEF(iemOp_adc_Gv_Ev)
7021{
7022 IEMOP_MNEMONIC("adc Gv,Ev");
7023 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
7024}
7025
7026
7027/** Opcode 0x14. */
7028FNIEMOP_DEF(iemOp_adc_Al_Ib)
7029{
7030 IEMOP_MNEMONIC("adc al,Ib");
7031 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
7032}
7033
7034
7035/** Opcode 0x15. */
7036FNIEMOP_DEF(iemOp_adc_eAX_Iz)
7037{
7038 IEMOP_MNEMONIC("adc rAX,Iz");
7039 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
7040}
7041
7042
7043/** Opcode 0x16. */
7044FNIEMOP_DEF(iemOp_push_SS)
7045{
7046 IEMOP_MNEMONIC("push ss");
7047 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
7048}
7049
7050
7051/** Opcode 0x17. */
7052FNIEMOP_DEF(iemOp_pop_SS)
7053{
7054 IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
7055 IEMOP_HLP_NO_LOCK_PREFIX();
7056 IEMOP_HLP_NO_64BIT();
7057 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
7058}
7059
7060
7061/** Opcode 0x18. */
7062FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
7063{
7064 IEMOP_MNEMONIC("sbb Eb,Gb");
7065 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
7066}
7067
7068
7069/** Opcode 0x19. */
7070FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
7071{
7072 IEMOP_MNEMONIC("sbb Ev,Gv");
7073 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
7074}
7075
7076
7077/** Opcode 0x1a. */
7078FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
7079{
7080 IEMOP_MNEMONIC("sbb Gb,Eb");
7081 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
7082}
7083
7084
7085/** Opcode 0x1b. */
7086FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
7087{
7088 IEMOP_MNEMONIC("sbb Gv,Ev");
7089 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
7090}
7091
7092
7093/** Opcode 0x1c. */
7094FNIEMOP_DEF(iemOp_sbb_Al_Ib)
7095{
7096 IEMOP_MNEMONIC("sbb al,Ib");
7097 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
7098}
7099
7100
7101/** Opcode 0x1d. */
7102FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
7103{
7104 IEMOP_MNEMONIC("sbb rAX,Iz");
7105 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
7106}
7107
7108
7109/** Opcode 0x1e. */
7110FNIEMOP_DEF(iemOp_push_DS)
7111{
7112 IEMOP_MNEMONIC("push ds");
7113 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
7114}
7115
7116
7117/** Opcode 0x1f. */
7118FNIEMOP_DEF(iemOp_pop_DS)
7119{
7120 IEMOP_MNEMONIC("pop ds");
7121 IEMOP_HLP_NO_LOCK_PREFIX();
7122 IEMOP_HLP_NO_64BIT();
7123 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
7124}
7125
7126
7127/** Opcode 0x20. */
7128FNIEMOP_DEF(iemOp_and_Eb_Gb)
7129{
7130 IEMOP_MNEMONIC("and Eb,Gb");
7131 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7132 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
7133}
7134
7135
7136/** Opcode 0x21. */
7137FNIEMOP_DEF(iemOp_and_Ev_Gv)
7138{
7139 IEMOP_MNEMONIC("and Ev,Gv");
7140 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7141 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
7142}
7143
7144
7145/** Opcode 0x22. */
7146FNIEMOP_DEF(iemOp_and_Gb_Eb)
7147{
7148 IEMOP_MNEMONIC("and Gb,Eb");
7149 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7150 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
7151}
7152
7153
7154/** Opcode 0x23. */
7155FNIEMOP_DEF(iemOp_and_Gv_Ev)
7156{
7157 IEMOP_MNEMONIC("and Gv,Ev");
7158 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7159 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
7160}
7161
7162
7163/** Opcode 0x24. */
7164FNIEMOP_DEF(iemOp_and_Al_Ib)
7165{
7166 IEMOP_MNEMONIC("and al,Ib");
7167 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7168 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
7169}
7170
7171
7172/** Opcode 0x25. */
7173FNIEMOP_DEF(iemOp_and_eAX_Iz)
7174{
7175 IEMOP_MNEMONIC("and rAX,Iz");
7176 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7177 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
7178}
7179
7180
7181/** Opcode 0x26. */
7182FNIEMOP_DEF(iemOp_seg_ES)
7183{
7184 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
7185 pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
7186 pIemCpu->iEffSeg = X86_SREG_ES;
7187
7188 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7189 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7190}
7191
7192
7193/** Opcode 0x27. */
7194FNIEMOP_DEF(iemOp_daa)
7195{
7196 IEMOP_MNEMONIC("daa AL");
7197 IEMOP_HLP_NO_64BIT();
7198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7199 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
7200 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
7201}
7202
7203
7204/** Opcode 0x28. */
7205FNIEMOP_DEF(iemOp_sub_Eb_Gb)
7206{
7207 IEMOP_MNEMONIC("sub Eb,Gb");
7208 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
7209}
7210
7211
7212/** Opcode 0x29. */
7213FNIEMOP_DEF(iemOp_sub_Ev_Gv)
7214{
7215 IEMOP_MNEMONIC("sub Ev,Gv");
7216 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
7217}
7218
7219
7220/** Opcode 0x2a. */
7221FNIEMOP_DEF(iemOp_sub_Gb_Eb)
7222{
7223 IEMOP_MNEMONIC("sub Gb,Eb");
7224 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
7225}
7226
7227
7228/** Opcode 0x2b. */
7229FNIEMOP_DEF(iemOp_sub_Gv_Ev)
7230{
7231 IEMOP_MNEMONIC("sub Gv,Ev");
7232 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
7233}
7234
7235
7236/** Opcode 0x2c. */
7237FNIEMOP_DEF(iemOp_sub_Al_Ib)
7238{
7239 IEMOP_MNEMONIC("sub al,Ib");
7240 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
7241}
7242
7243
7244/** Opcode 0x2d. */
7245FNIEMOP_DEF(iemOp_sub_eAX_Iz)
7246{
7247 IEMOP_MNEMONIC("sub rAX,Iz");
7248 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
7249}
7250
7251
7252/** Opcode 0x2e. */
7253FNIEMOP_DEF(iemOp_seg_CS)
7254{
7255 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
7256 pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
7257 pIemCpu->iEffSeg = X86_SREG_CS;
7258
7259 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7260 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7261}
7262
7263
7264/** Opcode 0x2f. */
7265FNIEMOP_DEF(iemOp_das)
7266{
7267 IEMOP_MNEMONIC("das AL");
7268 IEMOP_HLP_NO_64BIT();
7269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7270 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
7271 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
7272}
7273
7274
7275/** Opcode 0x30. */
7276FNIEMOP_DEF(iemOp_xor_Eb_Gb)
7277{
7278 IEMOP_MNEMONIC("xor Eb,Gb");
7279 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7280 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
7281}
7282
7283
7284/** Opcode 0x31. */
7285FNIEMOP_DEF(iemOp_xor_Ev_Gv)
7286{
7287 IEMOP_MNEMONIC("xor Ev,Gv");
7288 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7289 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
7290}
7291
7292
7293/** Opcode 0x32. */
7294FNIEMOP_DEF(iemOp_xor_Gb_Eb)
7295{
7296 IEMOP_MNEMONIC("xor Gb,Eb");
7297 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7298 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
7299}
7300
7301
7302/** Opcode 0x33. */
7303FNIEMOP_DEF(iemOp_xor_Gv_Ev)
7304{
7305 IEMOP_MNEMONIC("xor Gv,Ev");
7306 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7307 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
7308}
7309
7310
7311/** Opcode 0x34. */
7312FNIEMOP_DEF(iemOp_xor_Al_Ib)
7313{
7314 IEMOP_MNEMONIC("xor al,Ib");
7315 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7316 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
7317}
7318
7319
7320/** Opcode 0x35. */
7321FNIEMOP_DEF(iemOp_xor_eAX_Iz)
7322{
7323 IEMOP_MNEMONIC("xor rAX,Iz");
7324 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7325 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
7326}
7327
7328
7329/** Opcode 0x36. */
7330FNIEMOP_DEF(iemOp_seg_SS)
7331{
7332 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
7333 pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
7334 pIemCpu->iEffSeg = X86_SREG_SS;
7335
7336 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7337 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7338}
7339
7340
7341/** Opcode 0x37. */
7342FNIEMOP_STUB(iemOp_aaa);
7343
7344
7345/** Opcode 0x38. */
7346FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
7347{
7348 IEMOP_MNEMONIC("cmp Eb,Gb");
7349 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
7350 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
7351}
7352
7353
7354/** Opcode 0x39. */
7355FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
7356{
7357 IEMOP_MNEMONIC("cmp Ev,Gv");
7358 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
7359 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
7360}
7361
7362
7363/** Opcode 0x3a. */
7364FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
7365{
7366 IEMOP_MNEMONIC("cmp Gb,Eb");
7367 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
7368}
7369
7370
7371/** Opcode 0x3b. */
7372FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
7373{
7374 IEMOP_MNEMONIC("cmp Gv,Ev");
7375 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
7376}
7377
7378
7379/** Opcode 0x3c. */
7380FNIEMOP_DEF(iemOp_cmp_Al_Ib)
7381{
7382 IEMOP_MNEMONIC("cmp al,Ib");
7383 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
7384}
7385
7386
7387/** Opcode 0x3d. */
7388FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
7389{
7390 IEMOP_MNEMONIC("cmp rAX,Iz");
7391 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
7392}
7393
7394
7395/** Opcode 0x3e. */
7396FNIEMOP_DEF(iemOp_seg_DS)
7397{
7398 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
7399 pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
7400 pIemCpu->iEffSeg = X86_SREG_DS;
7401
7402 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7403 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7404}
7405
7406
7407/** Opcode 0x3f. */
7408FNIEMOP_STUB(iemOp_aas);
7409
7410/**
7411 * Common 'inc/dec/not/neg register' helper.
7412 */
7413FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
7414{
7415 IEMOP_HLP_NO_LOCK_PREFIX();
7416 switch (pIemCpu->enmEffOpSize)
7417 {
7418 case IEMMODE_16BIT:
7419 IEM_MC_BEGIN(2, 0);
7420 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7421 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7422 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
7423 IEM_MC_REF_EFLAGS(pEFlags);
7424 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
7425 IEM_MC_ADVANCE_RIP();
7426 IEM_MC_END();
7427 return VINF_SUCCESS;
7428
7429 case IEMMODE_32BIT:
7430 IEM_MC_BEGIN(2, 0);
7431 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7432 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7433 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7434 IEM_MC_REF_EFLAGS(pEFlags);
7435 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
7436 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7437 IEM_MC_ADVANCE_RIP();
7438 IEM_MC_END();
7439 return VINF_SUCCESS;
7440
7441 case IEMMODE_64BIT:
7442 IEM_MC_BEGIN(2, 0);
7443 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7444 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7445 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7446 IEM_MC_REF_EFLAGS(pEFlags);
7447 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
7448 IEM_MC_ADVANCE_RIP();
7449 IEM_MC_END();
7450 return VINF_SUCCESS;
7451 }
7452 return VINF_SUCCESS;
7453}
7454
7455
7456/** Opcode 0x40. */
7457FNIEMOP_DEF(iemOp_inc_eAX)
7458{
7459 /*
7460 * This is a REX prefix in 64-bit mode.
7461 */
7462 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7463 {
7464 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
7465 pIemCpu->fPrefixes |= IEM_OP_PRF_REX;
7466
7467 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7468 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7469 }
7470
7471 IEMOP_MNEMONIC("inc eAX");
7472 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
7473}
7474
7475
7476/** Opcode 0x41. */
7477FNIEMOP_DEF(iemOp_inc_eCX)
7478{
7479 /*
7480 * This is a REX prefix in 64-bit mode.
7481 */
7482 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7483 {
7484 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
7485 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
7486 pIemCpu->uRexB = 1 << 3;
7487
7488 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7489 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7490 }
7491
7492 IEMOP_MNEMONIC("inc eCX");
7493 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
7494}
7495
7496
7497/** Opcode 0x42. */
7498FNIEMOP_DEF(iemOp_inc_eDX)
7499{
7500 /*
7501 * This is a REX prefix in 64-bit mode.
7502 */
7503 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7504 {
7505 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
7506 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
7507 pIemCpu->uRexIndex = 1 << 3;
7508
7509 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7510 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7511 }
7512
7513 IEMOP_MNEMONIC("inc eDX");
7514 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
7515}
7516
7517
7518
7519/** Opcode 0x43. */
7520FNIEMOP_DEF(iemOp_inc_eBX)
7521{
7522 /*
7523 * This is a REX prefix in 64-bit mode.
7524 */
7525 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7526 {
7527 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
7528 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
7529 pIemCpu->uRexB = 1 << 3;
7530 pIemCpu->uRexIndex = 1 << 3;
7531
7532 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7533 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7534 }
7535
7536 IEMOP_MNEMONIC("inc eBX");
7537 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
7538}
7539
7540
7541/** Opcode 0x44. */
7542FNIEMOP_DEF(iemOp_inc_eSP)
7543{
7544 /*
7545 * This is a REX prefix in 64-bit mode.
7546 */
7547 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7548 {
7549 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
7550 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
7551 pIemCpu->uRexReg = 1 << 3;
7552
7553 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7554 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7555 }
7556
7557 IEMOP_MNEMONIC("inc eSP");
7558 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
7559}
7560
7561
7562/** Opcode 0x45. */
7563FNIEMOP_DEF(iemOp_inc_eBP)
7564{
7565 /*
7566 * This is a REX prefix in 64-bit mode.
7567 */
7568 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7569 {
7570 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
7571 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
7572 pIemCpu->uRexReg = 1 << 3;
7573 pIemCpu->uRexB = 1 << 3;
7574
7575 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7576 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7577 }
7578
7579 IEMOP_MNEMONIC("inc eBP");
7580 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
7581}
7582
7583
7584/** Opcode 0x46. */
7585FNIEMOP_DEF(iemOp_inc_eSI)
7586{
7587 /*
7588 * This is a REX prefix in 64-bit mode.
7589 */
7590 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7591 {
7592 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
7593 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
7594 pIemCpu->uRexReg = 1 << 3;
7595 pIemCpu->uRexIndex = 1 << 3;
7596
7597 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7598 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7599 }
7600
7601 IEMOP_MNEMONIC("inc eSI");
7602 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
7603}
7604
7605
7606/** Opcode 0x47. */
7607FNIEMOP_DEF(iemOp_inc_eDI)
7608{
7609 /*
7610 * This is a REX prefix in 64-bit mode.
7611 */
7612 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7613 {
7614 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
7615 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
7616 pIemCpu->uRexReg = 1 << 3;
7617 pIemCpu->uRexB = 1 << 3;
7618 pIemCpu->uRexIndex = 1 << 3;
7619
7620 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7621 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7622 }
7623
7624 IEMOP_MNEMONIC("inc eDI");
7625 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
7626}
7627
7628
7629/** Opcode 0x48. */
7630FNIEMOP_DEF(iemOp_dec_eAX)
7631{
7632 /*
7633 * This is a REX prefix in 64-bit mode.
7634 */
7635 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7636 {
7637 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
7638 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
7639 iemRecalEffOpSize(pIemCpu);
7640
7641 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7642 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7643 }
7644
7645 IEMOP_MNEMONIC("dec eAX");
7646 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
7647}
7648
7649
7650/** Opcode 0x49. */
7651FNIEMOP_DEF(iemOp_dec_eCX)
7652{
7653 /*
7654 * This is a REX prefix in 64-bit mode.
7655 */
7656 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7657 {
7658 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
7659 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
7660 pIemCpu->uRexB = 1 << 3;
7661 iemRecalEffOpSize(pIemCpu);
7662
7663 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7664 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7665 }
7666
7667 IEMOP_MNEMONIC("dec eCX");
7668 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
7669}
7670
7671
7672/** Opcode 0x4a. */
7673FNIEMOP_DEF(iemOp_dec_eDX)
7674{
7675 /*
7676 * This is a REX prefix in 64-bit mode.
7677 */
7678 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7679 {
7680 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
7681 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
7682 pIemCpu->uRexIndex = 1 << 3;
7683 iemRecalEffOpSize(pIemCpu);
7684
7685 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7686 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7687 }
7688
7689 IEMOP_MNEMONIC("dec eDX");
7690 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
7691}
7692
7693
7694/** Opcode 0x4b. */
7695FNIEMOP_DEF(iemOp_dec_eBX)
7696{
7697 /*
7698 * This is a REX prefix in 64-bit mode.
7699 */
7700 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7701 {
7702 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
7703 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
7704 pIemCpu->uRexB = 1 << 3;
7705 pIemCpu->uRexIndex = 1 << 3;
7706 iemRecalEffOpSize(pIemCpu);
7707
7708 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7709 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7710 }
7711
7712 IEMOP_MNEMONIC("dec eBX");
7713 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
7714}
7715
7716
7717/** Opcode 0x4c. */
7718FNIEMOP_DEF(iemOp_dec_eSP)
7719{
7720 /*
7721 * This is a REX prefix in 64-bit mode.
7722 */
7723 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7724 {
7725 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
7726 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
7727 pIemCpu->uRexReg = 1 << 3;
7728 iemRecalEffOpSize(pIemCpu);
7729
7730 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7731 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7732 }
7733
7734 IEMOP_MNEMONIC("dec eSP");
7735 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
7736}
7737
7738
7739/** Opcode 0x4d. */
7740FNIEMOP_DEF(iemOp_dec_eBP)
7741{
7742 /*
7743 * This is a REX prefix in 64-bit mode.
7744 */
7745 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7746 {
7747 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
7748 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
7749 pIemCpu->uRexReg = 1 << 3;
7750 pIemCpu->uRexB = 1 << 3;
7751 iemRecalEffOpSize(pIemCpu);
7752
7753 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7754 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7755 }
7756
7757 IEMOP_MNEMONIC("dec eBP");
7758 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
7759}
7760
7761
7762/** Opcode 0x4e. */
7763FNIEMOP_DEF(iemOp_dec_eSI)
7764{
7765 /*
7766 * This is a REX prefix in 64-bit mode.
7767 */
7768 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7769 {
7770 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
7771 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
7772 pIemCpu->uRexReg = 1 << 3;
7773 pIemCpu->uRexIndex = 1 << 3;
7774 iemRecalEffOpSize(pIemCpu);
7775
7776 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7777 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7778 }
7779
7780 IEMOP_MNEMONIC("dec eSI");
7781 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
7782}
7783
7784
7785/** Opcode 0x4f. */
7786FNIEMOP_DEF(iemOp_dec_eDI)
7787{
7788 /*
7789 * This is a REX prefix in 64-bit mode.
7790 */
7791 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7792 {
7793 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
7794 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
7795 pIemCpu->uRexReg = 1 << 3;
7796 pIemCpu->uRexB = 1 << 3;
7797 pIemCpu->uRexIndex = 1 << 3;
7798 iemRecalEffOpSize(pIemCpu);
7799
7800 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7801 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7802 }
7803
7804 IEMOP_MNEMONIC("dec eDI");
7805 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
7806}
7807
7808
7809/**
7810 * Common 'push register' helper.
7811 */
7812FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
7813{
7814 IEMOP_HLP_NO_LOCK_PREFIX();
7815 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7816 {
7817 iReg |= pIemCpu->uRexB;
7818 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
7819 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
7820 }
7821
7822 switch (pIemCpu->enmEffOpSize)
7823 {
7824 case IEMMODE_16BIT:
7825 IEM_MC_BEGIN(0, 1);
7826 IEM_MC_LOCAL(uint16_t, u16Value);
7827 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
7828 IEM_MC_PUSH_U16(u16Value);
7829 IEM_MC_ADVANCE_RIP();
7830 IEM_MC_END();
7831 break;
7832
7833 case IEMMODE_32BIT:
7834 IEM_MC_BEGIN(0, 1);
7835 IEM_MC_LOCAL(uint32_t, u32Value);
7836 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
7837 IEM_MC_PUSH_U32(u32Value);
7838 IEM_MC_ADVANCE_RIP();
7839 IEM_MC_END();
7840 break;
7841
7842 case IEMMODE_64BIT:
7843 IEM_MC_BEGIN(0, 1);
7844 IEM_MC_LOCAL(uint64_t, u64Value);
7845 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
7846 IEM_MC_PUSH_U64(u64Value);
7847 IEM_MC_ADVANCE_RIP();
7848 IEM_MC_END();
7849 break;
7850 }
7851
7852 return VINF_SUCCESS;
7853}
7854
7855
7856/** Opcode 0x50. */
7857FNIEMOP_DEF(iemOp_push_eAX)
7858{
7859 IEMOP_MNEMONIC("push rAX");
7860 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
7861}
7862
7863
7864/** Opcode 0x51. */
7865FNIEMOP_DEF(iemOp_push_eCX)
7866{
7867 IEMOP_MNEMONIC("push rCX");
7868 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
7869}
7870
7871
7872/** Opcode 0x52. */
7873FNIEMOP_DEF(iemOp_push_eDX)
7874{
7875 IEMOP_MNEMONIC("push rDX");
7876 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
7877}
7878
7879
7880/** Opcode 0x53. */
7881FNIEMOP_DEF(iemOp_push_eBX)
7882{
7883 IEMOP_MNEMONIC("push rBX");
7884 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
7885}
7886
7887
7888/** Opcode 0x54. */
7889FNIEMOP_DEF(iemOp_push_eSP)
7890{
7891 IEMOP_MNEMONIC("push rSP");
7892 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
7893}
7894
7895
7896/** Opcode 0x55. */
7897FNIEMOP_DEF(iemOp_push_eBP)
7898{
7899 IEMOP_MNEMONIC("push rBP");
7900 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
7901}
7902
7903
7904/** Opcode 0x56. */
7905FNIEMOP_DEF(iemOp_push_eSI)
7906{
7907 IEMOP_MNEMONIC("push rSI");
7908 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
7909}
7910
7911
7912/** Opcode 0x57. */
7913FNIEMOP_DEF(iemOp_push_eDI)
7914{
7915 IEMOP_MNEMONIC("push rDI");
7916 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
7917}
7918
7919
7920/**
7921 * Common 'pop register' helper.
7922 */
7923FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
7924{
7925 IEMOP_HLP_NO_LOCK_PREFIX();
7926 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7927 {
7928 iReg |= pIemCpu->uRexB;
7929 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
7930 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
7931 }
7932
7933 switch (pIemCpu->enmEffOpSize)
7934 {
7935 case IEMMODE_16BIT:
7936 IEM_MC_BEGIN(0, 1);
7937 IEM_MC_LOCAL(uint16_t, *pu16Dst);
7938 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
7939 IEM_MC_POP_U16(pu16Dst);
7940 IEM_MC_ADVANCE_RIP();
7941 IEM_MC_END();
7942 break;
7943
7944 case IEMMODE_32BIT:
7945 IEM_MC_BEGIN(0, 1);
7946 IEM_MC_LOCAL(uint32_t, *pu32Dst);
7947 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7948 IEM_MC_POP_U32(pu32Dst);
7949 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
7950 IEM_MC_ADVANCE_RIP();
7951 IEM_MC_END();
7952 break;
7953
7954 case IEMMODE_64BIT:
7955 IEM_MC_BEGIN(0, 1);
7956 IEM_MC_LOCAL(uint64_t, *pu64Dst);
7957 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7958 IEM_MC_POP_U64(pu64Dst);
7959 IEM_MC_ADVANCE_RIP();
7960 IEM_MC_END();
7961 break;
7962 }
7963
7964 return VINF_SUCCESS;
7965}
7966
7967
7968/** Opcode 0x58. */
7969FNIEMOP_DEF(iemOp_pop_eAX)
7970{
7971 IEMOP_MNEMONIC("pop rAX");
7972 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
7973}
7974
7975
7976/** Opcode 0x59. */
7977FNIEMOP_DEF(iemOp_pop_eCX)
7978{
7979 IEMOP_MNEMONIC("pop rCX");
7980 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
7981}
7982
7983
7984/** Opcode 0x5a. */
7985FNIEMOP_DEF(iemOp_pop_eDX)
7986{
7987 IEMOP_MNEMONIC("pop rDX");
7988 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
7989}
7990
7991
7992/** Opcode 0x5b. */
7993FNIEMOP_DEF(iemOp_pop_eBX)
7994{
7995 IEMOP_MNEMONIC("pop rBX");
7996 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
7997}
7998
7999
/** Opcode 0x5c.  POP rSP.
 *
 * Needs special handling because the destination register is the stack
 * pointer itself: the value is popped into a local first and only then
 * stored to xSP, so the pop's own SP update cannot clobber the result.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        if (pIemCpu->uRexB)
            /* With REX.B this encodes POP r12, which the common worker handles fine. */
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        /* In 64-bit mode the default operand size is 64-bit; 0x66 selects 16-bit. */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8047
8048
/** Opcode 0x5d.  POP rBP - pops the stack top into BP/EBP/RBP via the common worker. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
8055
8056
/** Opcode 0x5e.  POP rSI - pops the stack top into SI/ESI/RSI via the common worker. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
8063
8064
/** Opcode 0x5f.  POP rDI - pops the stack top into DI/EDI/RDI via the common worker. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8071
8072
/** Opcode 0x60.  PUSHA/PUSHAD - push all general registers.
 * Invalid in 64-bit mode; dispatches to the 16- or 32-bit C implementation
 * based on the effective operand size. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit was excluded above. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
8083
8084
/** Opcode 0x61.  POPA/POPAD - pop all general registers.
 * Invalid in 64-bit mode; dispatches to the 16- or 32-bit C implementation
 * based on the effective operand size. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit was excluded above. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
8095
8096
/** Opcode 0x62.  BOUND Gv,Ma - not implemented yet (stub raises invalid opcode).
 * NOTE(review): the _evex suffix suggests this byte also doubles as the EVEX
 * prefix on AVX-512 capable CPUs -- confirm before implementing. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
8099
8100
/** Opcode 0x63 - non-64-bit modes.
 * ARPL Ew,Gw - adjust RPL field of a selector; always 16-bit operands.
 * Not valid in real or V86 mode; in 64-bit mode this opcode is MOVSXD
 * (see iemOp_movsxd_Gv_Ev). */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC("arpl Ew,Gw");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: reference the register and call the assembly worker. */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t,   u16Src,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write, operate, then commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8149
8150
/** Opcode 0x63.
 * MOVSXD Gv,Ev - sign-extend a 32-bit operand into a 64-bit register
 * (64-bit mode only; the caller already branched on REX.W, see assertion).
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register: fetch 32-bit source sign-extended to 64 bits.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory: 32-bit load sign-extended to 64 bits.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8192
8193
/** Opcode 0x64.  FS segment override prefix.
 * Records the prefix, selects FS as the effective segment, then decodes and
 * dispatches the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8204
8205
/** Opcode 0x65.  GS segment override prefix.
 * Records the prefix, selects GS as the effective segment, then decodes and
 * dispatches the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg    = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8216
8217
/** Opcode 0x66.  Operand-size override prefix.
 * Records the prefix, recalculates the effective operand size, then decodes
 * and dispatches the next opcode byte. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8228
8229
/** Opcode 0x67.  Address-size override prefix.
 * Toggles the effective address mode relative to the default (16<->32 in
 * legacy modes, 64->32 in long mode), then decodes and dispatches the next
 * opcode byte. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8246
8247
/** Opcode 0x68.  PUSH Iz - push an immediate word/dword/qword.
 * In 64-bit mode the immediate is 32 bits sign-extended to 64. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* PUSH defaults to 64-bit operand size in long mode. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Only 32 immediate bytes are fetched; sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8291
8292
/** Opcode 0x69.  IMUL Gv,Ev,Iz - three-operand signed multiply.
 * The product is computed into a local and then stored to Gv, so Ev is never
 * written.  SF/ZF/AF/PF are undefined after IMUL (see verification mask). */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 2 = number of immediate bytes still to come after the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 = number of immediate bytes still to come after the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; immediate is 32 bits sign-extended to 64. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 = number of immediate bytes still to come after the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_3);
}
8451
8452
/** Opcode 0x6a.  PUSH Ib - push a sign-extended byte immediate. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* PUSH defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        /* The signed byte is implicitly sign-extended to the push width. */
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8478
8479
/** Opcode 0x6b.  IMUL Gv,Ev,Ib - three-operand signed multiply with a
 * sign-extended byte immediate.  The product is computed into a local and
 * then stored to Gv, so Ev is never written.  SF/ZF/AF/PF are undefined. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 1 = number of immediate bytes still to come after the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 1 = number of immediate bytes still to come after the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 1 = number of immediate bytes still to come after the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_3);
}
8632
8633
/** Opcode 0x6c.  INSB - input byte string from port DX.
 * Dispatches to the appropriate C implementation by address size, with a
 * separate REP variant when a REPZ/REPNZ prefix is present. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8661
8662
/** Opcode 0x6d.  INSW/INSD - input word/dword string from port DX.
 * Dispatches by operand size and address size.  The 64-bit operand size case
 * shares the 32-bit implementation (see fallthrough below). */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fallthrough - 64-bit operand size uses the op32 workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fallthrough - 64-bit operand size uses the op32 workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8722
8723
8724/** Opcode 0x6e. */
8725FNIEMOP_DEF(iemOp_outsb_Yb_DX)
8726{
8727 IEMOP_HLP_NO_LOCK_PREFIX();
8728 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8729 {
8730 IEMOP_MNEMONIC("rep out DX,Yb");
8731 switch (pIemCpu->enmEffAddrMode)
8732 {
8733 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
8734 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
8735 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
8736 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8737 }
8738 }
8739 else
8740 {
8741 IEMOP_MNEMONIC("out DX,Yb");
8742 switch (pIemCpu->enmEffAddrMode)
8743 {
8744 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
8745 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
8746 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
8747 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8748 }
8749 }
8750}
8751
8752
/** Opcode 0x6f.  OUTSW/OUTSD - output word/dword string to port DX.
 * Dispatches by operand size and address size; the 64-bit operand size case
 * shares the 32-bit implementation.  The effective segment is passed along
 * since OUTS honours segment overrides. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fallthrough - 64-bit operand size uses the op32 workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fallthrough - 64-bit operand size uses the op32 workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8812
8813
/** Opcode 0x70.  JO rel8 - jump short if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8831
8832
/** Opcode 0x71.  JNO rel8 - jump short if not overflow (OF=0); branches are inverted. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8850
/** Opcode 0x72.  JC/JB/JNAE rel8 - jump short if carry (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8868
8869
/** Opcode 0x73.  JNC/JNB/JAE rel8 - jump short if not carry (CF=0); branches are inverted. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8887
8888
/** Opcode 0x74.  JE/JZ rel8 - jump short if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8906
8907
/** Opcode 0x75.  JNE/JNZ rel8 - jump short if not equal/not zero (ZF=0); branches are inverted. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8925
8926
/** Opcode 0x76.  JBE/JNA rel8 - jump short if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8944
8945
/** Opcode 0x77.  JNBE/JA rel8 - jump short if above (CF=0 and ZF=0); branches are inverted. */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8963
8964
/** Opcode 0x78.  JS rel8 - jump short if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8982
8983
/** Opcode 0x79.  JNS rel8 - jump short if not sign (SF=0); branches are inverted. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9001
9002
/** Opcode 0x7a.  JP/JPE rel8 - jump short if parity even (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9020
9021
/** Opcode 0x7b.  JNP/JPO rel8 - jump short if parity odd (PF=0); branches are inverted. */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9039
9040
/** Opcode 0x7c.  JL/JNGE rel8 - jump short if less (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9058
9059
/** Opcode 0x7d - JNL/JGE rel8: jump short if not less (signed), i.e. SF == OF. */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();                     /* LOCK is invalid on Jcc */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    /* Inverted branch sense: SF != OF means the JNL branch is NOT taken. */
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9077
9078
/** Opcode 0x7e - JLE/JNG rel8: jump short if less or equal (signed), i.e. ZF set or SF != OF. */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();                     /* LOCK is invalid on Jcc */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9096
9097
/** Opcode 0x7f - JNLE/JG rel8: jump short if greater (signed), i.e. ZF clear and SF == OF. */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();                     /* LOCK is invalid on Jcc */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    /* Inverted branch sense: "less or equal" means the JNLE branch is NOT taken. */
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9115
9116
/**
 * Opcode 0x80 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib.
 *
 * The /reg field of the ModR/M byte selects which of the eight binary
 * operations to perform on a byte destination with a byte immediate.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Pick the mnemonic out of the packed string: each name is padded to 4 bytes. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK is only valid with a memory destination */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;   /* lockable op: map read-write so LOCK works */
        else
        { /* CMP only reads the destination, so LOCK is invalid for it. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The trailing '1' tells the effective-address calc that one immediate byte follows. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9175
9176
/**
 * Opcode 0x81 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Iz.
 *
 * Word/dword/qword destination with an operand-sized immediate; in 64-bit
 * operand size the immediate is 32 bits sign-extended to 64.  The /reg field
 * of the ModR/M byte selects the operation.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Pick the mnemonic out of the packed string: each name is padded to 4 bytes. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK is only valid with a memory destination */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;   /* lockable op: map read-write so LOCK works */
                else
                { /* CMP, TEST - read-only destination, LOCK is invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Two immediate bytes follow the ModR/M encoding (cbImm = 2). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK is only valid with a memory destination */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;   /* lockable op: map read-write so LOCK works */
                else
                { /* CMP, TEST - read-only destination, LOCK is invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Four immediate bytes follow the ModR/M encoding (cbImm = 4). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* Iz in 64-bit operand size: 32-bit immediate sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK is only valid with a memory destination */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;   /* lockable op: map read-write so LOCK works */
                else
                { /* CMP - read-only destination, LOCK is invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Still only four immediate bytes in the instruction stream (cbImm = 4). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
9351
9352
/**
 * Opcode 0x82 - alias of opcode 0x80 (Group 1 Eb,Ib); only valid outside
 * 64-bit mode, where it raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);   /* identical behavior to 0x80 */
}
9359
9360
/**
 * Opcode 0x83 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Ib.
 *
 * Word/dword/qword destination with a byte immediate that is sign-extended
 * to the effective operand size.  The /reg field of the ModR/M byte selects
 * the operation.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Pick the mnemonic out of the packed string: each name is padded to 4 bytes. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK is only valid with a memory destination */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* The (int8_t) cast sign-extends the byte immediate to the operand size. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        /* If the U16 locked worker exists, the other sizes do too (same table entry). */
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - read-only destination, LOCK is invalid. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* One immediate byte follows the ModR/M encoding (cbImm = 1). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);   /* sign-extend to operand size */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);   /* sign-extend to operand size */
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);   /* sign-extend to operand size */
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
9521
9522
/**
 * Opcode 0x84 - TEST Eb,Gb: AND without storing the result, flags only.
 * Reuses the generic byte rm,r8 binary-op decoder with the TEST worker table.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    /* AF is architecturally undefined after TEST; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
9531
9532
/**
 * Opcode 0x85 - TEST Ev,Gv: AND without storing the result, flags only.
 * Reuses the generic operand-sized rm,rv binary-op decoder with the TEST worker table.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    /* AF is architecturally undefined after TEST; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
9541
9542
/**
 * Opcode 0x86 - XCHG Eb,Gb: exchange a byte register with a byte
 * register/memory operand.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK is only valid with a memory operand */

        /* Plain register swap via two temporaries. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory: map the location read-write and let the
         * assembly worker do the (implicitly locked) exchange.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9590
9591
/**
 * Opcode 0x87 - XCHG Ev,Gv: exchange an operand-sized register with a
 * register/memory operand.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK is only valid with a memory operand */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                /* Plain register swap via two temporaries. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                /* Note: U32 stores clear the upper register halves as part of the macro. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory: map the location read-write and let the
         * assembly worker do the (implicitly locked) exchange.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The worker wrote via reference, so clear the upper half explicitly. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9713
9714
/** Opcode 0x88 - MOV Eb,Gb: store a byte register into a register/memory operand. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register-to-register copy. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* no immediate bytes follow */
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
9753
9754
/** Opcode 0x89 - MOV Ev,Gv: store an operand-sized register into a register/memory operand. */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register-to-register copy; one case per effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* no immediate bytes follow */
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
9841
9842
/** Opcode 0x8a - MOV Gb,Eb: load a byte register from a register/memory operand. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register-to-register copy (direction reversed relative to 0x88). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* no immediate bytes follow */
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9879
9880
/** Opcode 0x8b - MOV Gv,Ev: load an operand-sized register from a register/memory operand. */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register-to-register copy; one case per effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* no immediate bytes follow */
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
9967
9968
/**
 * Opcode 0x63 - mode-dependent dispatcher (defined here so the handlers it
 * forwards to are already declared):
 *   - outside 64-bit mode: ARPL Ew,Gw;
 *   - 64-bit mode, non-64-bit operand size: plain MOV Gv,Ev;
 *   - 64-bit mode with 64-bit operand size: MOVSXD Gv,Ev.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
9978
9979
/** Opcode 0x8c - MOV Ev,Sw: store a segment register into a register/memory operand. */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)   /* only ES..GS are valid segment registers */
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);    /* zero-extend the selector */
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);    /* zero-extend the selector */
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* no immediate bytes follow */
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10052
10053
10054
10055
/** Opcode 0x8d. */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    /*
     * LEA Gv,M - store the effective address of the memory operand in the
     * destination general purpose register.  The register form (mod=3) has
     * no effective address to compute and therefore raises \#UD.
     */
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    /* The result is truncated (16/32-bit) or stored whole (64-bit) according
       to the effective operand size; destination index includes REX.R. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_5);
}
10100
10101
/** Opcode 0x8e. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    /*
     * MOV Sw,Ev - load a segment register from a 16-bit register or memory
     * operand.  CS cannot be a destination and reg values above GS do not
     * name a segment register, so both raise \#UD.  The actual load is
     * performed by iemCImpl_load_SReg.
     */
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        /* The CImpl call handles RIP advancing on success. */
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10155
10156
/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
    uint8_t const   offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    /* Rewind the opcode pointer so the R/M+SIB bytes can be decoded again
       below, this time with the incremented rSP. */
    pIemCpu->offOpcode = offOpcodeSaved;

    PCPUMCTX        pCtx     = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const  RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Second decode pass with the adjusted rSP; then restore rSP so the pop
       below starts from the original stack pointer. */
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS);
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Only commit the new rSP and advance RIP when both the stack read and
       the memory store succeeded. */
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
10258
10259
/** Opcode 0x8f. */
FNIEMOP_DEF(iemOp_Grp1A)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Only the /0 encoding is defined (POP Ev); dispatch on the reg field. */
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
    /** @todo XOP decoding. */
    IEMOP_MNEMONIC("3-byte-xop");
    return IEMOP_RAISE_INVALID_OPCODE();
}
10272
10273
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Swaps the general register indicated by @a iReg (extended with REX.B, so
 * r8-r15 are reachable) with rAX at the current effective operand size.
 * The 32-bit case uses IEM_MC_STORE_GREG_U32, which also zeroes the upper
 * halves of the destination registers on 64-bit capable CPUs.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    iReg |= pIemCpu->uRexB; /* REX.B extends the register index. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10323
10324
10325/** Opcode 0x90. */
10326FNIEMOP_DEF(iemOp_nop)
10327{
10328 /* R8/R8D and RAX/EAX can be exchanged. */
10329 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
10330 {
10331 IEMOP_MNEMONIC("xchg r8,rAX");
10332 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
10333 }
10334
10335 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
10336 IEMOP_MNEMONIC("pause");
10337 else
10338 IEMOP_MNEMONIC("nop");
10339 IEM_MC_BEGIN(0, 0);
10340 IEM_MC_ADVANCE_RIP();
10341 IEM_MC_END();
10342 return VINF_SUCCESS;
10343}
10344
10345
/** Opcode 0x91. */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    /* Exchanges rCX (r9 with REX.B, handled by the helper) with rAX. */
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10352
10353
/** Opcode 0x92. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    /* Exchanges rDX (r10 with REX.B, handled by the helper) with rAX. */
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10360
10361
/** Opcode 0x93. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    /* Exchanges rBX (r11 with REX.B, handled by the helper) with rAX. */
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10368
10369
10370/** Opcode 0x94. */
10371FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10372{
10373 IEMOP_MNEMONIC("xchg rSX,rAX");
10374 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10375}
10376
10377
/** Opcode 0x95. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    /* Exchanges rBP (r13 with REX.B, handled by the helper) with rAX. */
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10384
10385
/** Opcode 0x96. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    /* Exchanges rSI (r14 with REX.B, handled by the helper) with rAX. */
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10392
10393
/** Opcode 0x97. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    /* Exchanges rDI (r15 with REX.B, handled by the helper) with rAX. */
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10400
10401
/** Opcode 0x98. */
FNIEMOP_DEF(iemOp_cbw)
{
    /*
     * CBW/CWDE/CDQE - sign extend AL/AX/EAX into AX/EAX/RAX, selected by the
     * effective operand size.  Implemented by testing the sign bit of the
     * narrower value and then OR-ing in or AND-ing out the upper bits.
     */
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10447
10448
/** Opcode 0x99. */
FNIEMOP_DEF(iemOp_cwd)
{
    /*
     * CWD/CDQ/CQO - sign extend AX/EAX/RAX into DX/EDX/RDX, selected by the
     * effective operand size.  rDX is set to all ones or all zeros depending
     * on the sign bit of rAX.
     */
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10494
10495
/** Opcode 0x9a. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    /*
     * CALL Ap - direct far call; the target selector:offset is encoded as an
     * immediate far pointer.  Not valid in 64-bit mode.  The offset is 16 or
     * 32 bits wide depending on the effective operand size.
     */
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
10512
10513
/** Opcode 0x9b. (aka fwait) */
FNIEMOP_DEF(iemOp_wait)
{
    /*
     * WAIT/FWAIT - otherwise a no-op, but may raise \#NM (device not
     * available) or a pending FPU exception via the two MAYBE_RAISE checks.
     */
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10527
10528
10529/** Opcode 0x9c. */
10530FNIEMOP_DEF(iemOp_pushf_Fv)
10531{
10532 IEMOP_HLP_NO_LOCK_PREFIX();
10533 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10534 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
10535}
10536
10537
10538/** Opcode 0x9d. */
10539FNIEMOP_DEF(iemOp_popf_Fv)
10540{
10541 IEMOP_HLP_NO_LOCK_PREFIX();
10542 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10543 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
10544}
10545
10546
/** Opcode 0x9e. */
FNIEMOP_DEF(iemOp_sahf)
{
    /*
     * SAHF - store AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF).
     * In 64-bit mode this is only valid when the CPUID LAHF/SAHF feature
     * bit is present.
     */
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_EXT_FEATURE_ECX_LAHF_SAHF))
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the flags SAHF may modify, clear the low EFLAGS byte and
       merge, forcing the always-set reserved bit 1. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10569
10570
/** Opcode 0x9f. */
FNIEMOP_DEF(iemOp_lahf)
{
    /*
     * LAHF - load AH from the low byte of EFLAGS.  Like SAHF, this is only
     * valid in 64-bit mode when the CPUID LAHF/SAHF feature bit is present.
     */
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_EXT_FEATURE_ECX_LAHF_SAHF))
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10587
10588
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
 * prefixes. Will return on failures.
 *
 * The immediate offset width follows the effective address size (16, 32 or
 * 64 bits) and is zero extended into the 64-bit destination variable.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pIemCpu->enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_NO_LOCK_PREFIX(); \
    } while (0)
10613
10614/** Opcode 0xa0. */
10615FNIEMOP_DEF(iemOp_mov_Al_Ob)
10616{
10617 /*
10618 * Get the offset and fend of lock prefixes.
10619 */
10620 RTGCPTR GCPtrMemOff;
10621 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10622
10623 /*
10624 * Fetch AL.
10625 */
10626 IEM_MC_BEGIN(0,1);
10627 IEM_MC_LOCAL(uint8_t, u8Tmp);
10628 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
10629 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10630 IEM_MC_ADVANCE_RIP();
10631 IEM_MC_END();
10632 return VINF_SUCCESS;
10633}
10634
10635
/** Opcode 0xa1. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * MOV rAX,Ov - load AX/EAX/RAX from a memory offset (moffs) encoded
     * directly in the instruction stream.
     *
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10681
10682
10683/** Opcode 0xa2. */
10684FNIEMOP_DEF(iemOp_mov_Ob_AL)
10685{
10686 /*
10687 * Get the offset and fend of lock prefixes.
10688 */
10689 RTGCPTR GCPtrMemOff;
10690 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10691
10692 /*
10693 * Store AL.
10694 */
10695 IEM_MC_BEGIN(0,1);
10696 IEM_MC_LOCAL(uint8_t, u8Tmp);
10697 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
10698 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
10699 IEM_MC_ADVANCE_RIP();
10700 IEM_MC_END();
10701 return VINF_SUCCESS;
10702}
10703
10704
10705/** Opcode 0xa3. */
10706FNIEMOP_DEF(iemOp_mov_Ov_rAX)
10707{
10708 /*
10709 * Get the offset and fend of lock prefixes.
10710 */
10711 RTGCPTR GCPtrMemOff;
10712 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10713
10714 /*
10715 * Store rAX.
10716 */
10717 switch (pIemCpu->enmEffOpSize)
10718 {
10719 case IEMMODE_16BIT:
10720 IEM_MC_BEGIN(0,1);
10721 IEM_MC_LOCAL(uint16_t, u16Tmp);
10722 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
10723 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
10724 IEM_MC_ADVANCE_RIP();
10725 IEM_MC_END();
10726 return VINF_SUCCESS;
10727
10728 case IEMMODE_32BIT:
10729 IEM_MC_BEGIN(0,1);
10730 IEM_MC_LOCAL(uint32_t, u32Tmp);
10731 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
10732 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
10733 IEM_MC_ADVANCE_RIP();
10734 IEM_MC_END();
10735 return VINF_SUCCESS;
10736
10737 case IEMMODE_64BIT:
10738 IEM_MC_BEGIN(0,1);
10739 IEM_MC_LOCAL(uint64_t, u64Tmp);
10740 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
10741 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
10742 IEM_MC_ADVANCE_RIP();
10743 IEM_MC_END();
10744 return VINF_SUCCESS;
10745
10746 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10747 }
10748}
10749
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits the microcode for a single (non-repeated) MOVS of ValBits bits with
 * AddrBits wide xSI/xDI: reads from DS(:override):xSI, writes to ES:xDI, then
 * advances or rewinds both index registers according to EFLAGS.DF. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
10768
/** Opcode 0xa4. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (F2 and F3 are treated the same for MOVS - both mean REP here.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10802
10803
10804/** Opcode 0xa5. */
10805FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
10806{
10807 IEMOP_HLP_NO_LOCK_PREFIX();
10808
10809 /*
10810 * Use the C implementation if a repeat prefix is encountered.
10811 */
10812 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10813 {
10814 IEMOP_MNEMONIC("rep movs Xv,Yv");
10815 switch (pIemCpu->enmEffOpSize)
10816 {
10817 case IEMMODE_16BIT:
10818 switch (pIemCpu->enmEffAddrMode)
10819 {
10820 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
10821 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
10822 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
10823 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10824 }
10825 break;
10826 case IEMMODE_32BIT:
10827 switch (pIemCpu->enmEffAddrMode)
10828 {
10829 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
10830 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
10831 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
10832 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10833 }
10834 case IEMMODE_64BIT:
10835 switch (pIemCpu->enmEffAddrMode)
10836 {
10837 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
10838 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
10839 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
10840 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10841 }
10842 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10843 }
10844 }
10845 IEMOP_MNEMONIC("movs Xv,Yv");
10846
10847 /*
10848 * Annoying double switch here.
10849 * Using ugly macro for implementing the cases, sharing it with movsb.
10850 */
10851 switch (pIemCpu->enmEffOpSize)
10852 {
10853 case IEMMODE_16BIT:
10854 switch (pIemCpu->enmEffAddrMode)
10855 {
10856 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
10857 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
10858 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
10859 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10860 }
10861 break;
10862
10863 case IEMMODE_32BIT:
10864 switch (pIemCpu->enmEffAddrMode)
10865 {
10866 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
10867 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
10868 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
10869 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10870 }
10871 break;
10872
10873 case IEMMODE_64BIT:
10874 switch (pIemCpu->enmEffAddrMode)
10875 {
10876 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
10877 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
10878 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
10879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10880 }
10881 break;
10882 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10883 }
10884 return VINF_SUCCESS;
10885}
10886
10887#undef IEM_MOVS_CASE
10888
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Emits the microcode for a single (non-repeated) CMPS of ValBits bits with
 * AddrBits wide xSI/xDI: compares DS(:override):xSI against ES:xDI via
 * iemAImpl_cmp_uNN (which sets the arithmetic flags), then advances or
 * rewinds both index registers according to EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
10916/** Opcode 0xa6. */
10917FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
10918{
10919 IEMOP_HLP_NO_LOCK_PREFIX();
10920
10921 /*
10922 * Use the C implementation if a repeat prefix is encountered.
10923 */
10924 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
10925 {
10926 IEMOP_MNEMONIC("repe cmps Xb,Yb");
10927 switch (pIemCpu->enmEffAddrMode)
10928 {
10929 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
10930 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
10931 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
10932 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10933 }
10934 }
10935 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
10936 {
10937 IEMOP_MNEMONIC("repe cmps Xb,Yb");
10938 switch (pIemCpu->enmEffAddrMode)
10939 {
10940 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
10941 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
10942 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
10943 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10944 }
10945 }
10946 IEMOP_MNEMONIC("cmps Xb,Yb");
10947
10948 /*
10949 * Sharing case implementation with cmps[wdq] below.
10950 */
10951 switch (pIemCpu->enmEffAddrMode)
10952 {
10953 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
10954 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
10955 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
10956 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10957 }
10958 return VINF_SUCCESS;
10959
10960}
10961
10962
10963/** Opcode 0xa7. */
10964FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
10965{
10966 IEMOP_HLP_NO_LOCK_PREFIX();
10967
10968 /*
10969 * Use the C implementation if a repeat prefix is encountered.
10970 */
10971 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
10972 {
10973 IEMOP_MNEMONIC("repe cmps Xv,Yv");
10974 switch (pIemCpu->enmEffOpSize)
10975 {
10976 case IEMMODE_16BIT:
10977 switch (pIemCpu->enmEffAddrMode)
10978 {
10979 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
10980 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
10981 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
10982 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10983 }
10984 break;
10985 case IEMMODE_32BIT:
10986 switch (pIemCpu->enmEffAddrMode)
10987 {
10988 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
10989 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
10990 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
10991 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10992 }
10993 case IEMMODE_64BIT:
10994 switch (pIemCpu->enmEffAddrMode)
10995 {
10996 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
10997 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
10998 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
10999 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11000 }
11001 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11002 }
11003 }
11004
11005 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11006 {
11007 IEMOP_MNEMONIC("repne cmps Xv,Yv");
11008 switch (pIemCpu->enmEffOpSize)
11009 {
11010 case IEMMODE_16BIT:
11011 switch (pIemCpu->enmEffAddrMode)
11012 {
11013 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
11014 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
11015 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
11016 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11017 }
11018 break;
11019 case IEMMODE_32BIT:
11020 switch (pIemCpu->enmEffAddrMode)
11021 {
11022 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
11023 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
11024 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
11025 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11026 }
11027 case IEMMODE_64BIT:
11028 switch (pIemCpu->enmEffAddrMode)
11029 {
11030 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
11031 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
11032 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
11033 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11034 }
11035 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11036 }
11037 }
11038
11039 IEMOP_MNEMONIC("cmps Xv,Yv");
11040
11041 /*
11042 * Annoying double switch here.
11043 * Using ugly macro for implementing the cases, sharing it with cmpsb.
11044 */
11045 switch (pIemCpu->enmEffOpSize)
11046 {
11047 case IEMMODE_16BIT:
11048 switch (pIemCpu->enmEffAddrMode)
11049 {
11050 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
11051 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
11052 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
11053 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11054 }
11055 break;
11056
11057 case IEMMODE_32BIT:
11058 switch (pIemCpu->enmEffAddrMode)
11059 {
11060 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
11061 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
11062 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
11063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11064 }
11065 break;
11066
11067 case IEMMODE_64BIT:
11068 switch (pIemCpu->enmEffAddrMode)
11069 {
11070 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
11071 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
11072 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
11073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11074 }
11075 break;
11076 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11077 }
11078 return VINF_SUCCESS;
11079
11080}
11081
11082#undef IEM_CMPS_CASE
11083
/** Opcode 0xa8. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    /* TEST AL,imm8: AND without writeback; only EFLAGS are updated. */
    IEMOP_MNEMONIC("test al,Ib");
    /* AF is architecturally undefined after TEST, so tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11091
11092
/** Opcode 0xa9. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    /* TEST rAX,immz (16/32/64-bit by effective operand size; imm is sign-extended for 64-bit). */
    IEMOP_MNEMONIC("test rAX,Iz");
    /* AF is architecturally undefined after TEST, so tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11100
11101
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the micro-ops for one STOS variant: store ValBits from rAX to
 * ES:[rDI] (ES is mandatory for string stores, no segment override), then
 * step rDI by ValBits/8 — down if EFLAGS.DF is set, up otherwise.  The
 * address register is zero-extended to 64 bits per AddrBits. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,  uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \

/** Opcode 0xaa. */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * REP and REPNE behave identically for STOS (only the address size matters).
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11151
11152
11153/** Opcode 0xab. */
11154FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
11155{
11156 IEMOP_HLP_NO_LOCK_PREFIX();
11157
11158 /*
11159 * Use the C implementation if a repeat prefix is encountered.
11160 */
11161 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11162 {
11163 IEMOP_MNEMONIC("rep stos Yv,rAX");
11164 switch (pIemCpu->enmEffOpSize)
11165 {
11166 case IEMMODE_16BIT:
11167 switch (pIemCpu->enmEffAddrMode)
11168 {
11169 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
11170 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
11171 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
11172 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11173 }
11174 break;
11175 case IEMMODE_32BIT:
11176 switch (pIemCpu->enmEffAddrMode)
11177 {
11178 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
11179 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
11180 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
11181 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11182 }
11183 case IEMMODE_64BIT:
11184 switch (pIemCpu->enmEffAddrMode)
11185 {
11186 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
11187 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
11188 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
11189 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11190 }
11191 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11192 }
11193 }
11194 IEMOP_MNEMONIC("stos Yv,rAX");
11195
11196 /*
11197 * Annoying double switch here.
11198 * Using ugly macro for implementing the cases, sharing it with stosb.
11199 */
11200 switch (pIemCpu->enmEffOpSize)
11201 {
11202 case IEMMODE_16BIT:
11203 switch (pIemCpu->enmEffAddrMode)
11204 {
11205 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
11206 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
11207 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
11208 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11209 }
11210 break;
11211
11212 case IEMMODE_32BIT:
11213 switch (pIemCpu->enmEffAddrMode)
11214 {
11215 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
11216 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
11217 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
11218 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11219 }
11220 break;
11221
11222 case IEMMODE_64BIT:
11223 switch (pIemCpu->enmEffAddrMode)
11224 {
11225 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
11226 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
11227 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
11228 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11229 }
11230 break;
11231 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11232 }
11233 return VINF_SUCCESS;
11234}
11235
11236#undef IEM_STOS_CASE
11237
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the micro-ops for one LODS variant: load ValBits from
 * iEffSeg:[rSI] (DS by default, segment override honoured) into rAX, then
 * step rSI by ValBits/8 — down if EFLAGS.DF is set, up otherwise. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,  uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11253
/** Opcode 0xac. */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * REP LODS is legal (if pointless); REP and REPNE behave identically here.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11287
11288
11289/** Opcode 0xad. */
11290FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
11291{
11292 IEMOP_HLP_NO_LOCK_PREFIX();
11293
11294 /*
11295 * Use the C implementation if a repeat prefix is encountered.
11296 */
11297 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11298 {
11299 IEMOP_MNEMONIC("rep lods rAX,Xv");
11300 switch (pIemCpu->enmEffOpSize)
11301 {
11302 case IEMMODE_16BIT:
11303 switch (pIemCpu->enmEffAddrMode)
11304 {
11305 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
11306 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
11307 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
11308 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11309 }
11310 break;
11311 case IEMMODE_32BIT:
11312 switch (pIemCpu->enmEffAddrMode)
11313 {
11314 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
11315 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
11316 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
11317 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11318 }
11319 case IEMMODE_64BIT:
11320 switch (pIemCpu->enmEffAddrMode)
11321 {
11322 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
11323 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
11324 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
11325 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11326 }
11327 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11328 }
11329 }
11330 IEMOP_MNEMONIC("lods rAX,Xv");
11331
11332 /*
11333 * Annoying double switch here.
11334 * Using ugly macro for implementing the cases, sharing it with lodsb.
11335 */
11336 switch (pIemCpu->enmEffOpSize)
11337 {
11338 case IEMMODE_16BIT:
11339 switch (pIemCpu->enmEffAddrMode)
11340 {
11341 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
11342 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
11343 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
11344 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11345 }
11346 break;
11347
11348 case IEMMODE_32BIT:
11349 switch (pIemCpu->enmEffAddrMode)
11350 {
11351 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
11352 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
11353 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
11354 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11355 }
11356 break;
11357
11358 case IEMMODE_64BIT:
11359 switch (pIemCpu->enmEffAddrMode)
11360 {
11361 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
11362 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
11363 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
11364 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11365 }
11366 break;
11367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11368 }
11369 return VINF_SUCCESS;
11370}
11371
11372#undef IEM_LODS_CASE
11373
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the micro-ops for one SCAS variant: compare rAX with ValBits read
 * from ES:[rDI] (ES is mandatory, no segment override) by calling the CMP
 * assembly worker (updates EFLAGS only, rAX is untouched), then step rDI by
 * ValBits/8 — down if EFLAGS.DF is set, up otherwise. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax,   0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11395
/** Opcode 0xae. */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * Unlike STOS/LODS, REPE and REPNE differ for SCAS (ZF terminates the
     * loop differently), so each gets its own set of CIMPL workers.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11440
11441
11442/** Opcode 0xaf. */
11443FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
11444{
11445 IEMOP_HLP_NO_LOCK_PREFIX();
11446
11447 /*
11448 * Use the C implementation if a repeat prefix is encountered.
11449 */
11450 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11451 {
11452 IEMOP_MNEMONIC("repe scas rAX,Xv");
11453 switch (pIemCpu->enmEffOpSize)
11454 {
11455 case IEMMODE_16BIT:
11456 switch (pIemCpu->enmEffAddrMode)
11457 {
11458 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
11459 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
11460 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
11461 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11462 }
11463 break;
11464 case IEMMODE_32BIT:
11465 switch (pIemCpu->enmEffAddrMode)
11466 {
11467 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
11468 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
11469 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
11470 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11471 }
11472 case IEMMODE_64BIT:
11473 switch (pIemCpu->enmEffAddrMode)
11474 {
11475 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
11476 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
11477 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
11478 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11479 }
11480 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11481 }
11482 }
11483 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11484 {
11485 IEMOP_MNEMONIC("repne scas rAX,Xv");
11486 switch (pIemCpu->enmEffOpSize)
11487 {
11488 case IEMMODE_16BIT:
11489 switch (pIemCpu->enmEffAddrMode)
11490 {
11491 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
11492 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
11493 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
11494 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11495 }
11496 break;
11497 case IEMMODE_32BIT:
11498 switch (pIemCpu->enmEffAddrMode)
11499 {
11500 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
11501 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
11502 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
11503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11504 }
11505 case IEMMODE_64BIT:
11506 switch (pIemCpu->enmEffAddrMode)
11507 {
11508 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
11509 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
11510 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
11511 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11512 }
11513 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11514 }
11515 }
11516 IEMOP_MNEMONIC("scas rAX,Xv");
11517
11518 /*
11519 * Annoying double switch here.
11520 * Using ugly macro for implementing the cases, sharing it with scasb.
11521 */
11522 switch (pIemCpu->enmEffOpSize)
11523 {
11524 case IEMMODE_16BIT:
11525 switch (pIemCpu->enmEffAddrMode)
11526 {
11527 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
11528 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
11529 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
11530 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11531 }
11532 break;
11533
11534 case IEMMODE_32BIT:
11535 switch (pIemCpu->enmEffAddrMode)
11536 {
11537 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
11538 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
11539 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
11540 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11541 }
11542 break;
11543
11544 case IEMMODE_64BIT:
11545 switch (pIemCpu->enmEffAddrMode)
11546 {
11547 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
11548 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
11549 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
11550 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11551 }
11552 break;
11553 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11554 }
11555 return VINF_SUCCESS;
11556}
11557
11558#undef IEM_SCAS_CASE
11559
/**
 * Common 'mov r8, imm8' helper.
 *
 * @param   iReg    The byte register index (REX.B already folded in by the
 *                  caller).  With no REX prefix, indices 4-7 address
 *                  AH/CH/DH/BH; the U8 greg store macro handles that mapping.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
11576
11577
/** Opcode 0xb0. */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    /* mov al/r8b,imm8 - uRexB folds REX.B into the register index. */
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
11584
11585
/** Opcode 0xb1. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    /* mov cl/r9b,imm8.  NOTE(review): name lacks the mov_ prefix used by 0xb0/0xb4. */
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
11592
11593
/** Opcode 0xb2. */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    /* mov dl/r10b,imm8. */
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
11600
11601
/** Opcode 0xb3. */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    /* mov bl/r11b,imm8. */
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
11608
11609
/** Opcode 0xb4. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    /* Index 4 (xSP) addresses AH without a REX prefix and SPL with one;
       the U8 greg store in the common helper performs that mapping. */
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
11616
11617
/** Opcode 0xb5. */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    /* Index 5 (xBP) is CH without REX, BPL with REX. */
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
11624
11625
/** Opcode 0xb6. */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    /* Index 6 (xSI) is DH without REX, SIL with REX. */
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
11632
11633
/** Opcode 0xb7. */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    /* Index 7 (xDI) is BH without REX, DIL with REX. */
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
11640
11641
11642/**
11643 * Common 'mov regX,immX' helper.
11644 */
11645FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
11646{
11647 switch (pIemCpu->enmEffOpSize)
11648 {
11649 case IEMMODE_16BIT:
11650 {
11651 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
11652 IEMOP_HLP_NO_LOCK_PREFIX();
11653
11654 IEM_MC_BEGIN(0, 1);
11655 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
11656 IEM_MC_STORE_GREG_U16(iReg, u16Value);
11657 IEM_MC_ADVANCE_RIP();
11658 IEM_MC_END();
11659 break;
11660 }
11661
11662 case IEMMODE_32BIT:
11663 {
11664 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11665 IEMOP_HLP_NO_LOCK_PREFIX();
11666
11667 IEM_MC_BEGIN(0, 1);
11668 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
11669 IEM_MC_STORE_GREG_U32(iReg, u32Value);
11670 IEM_MC_ADVANCE_RIP();
11671 IEM_MC_END();
11672 break;
11673 }
11674 case IEMMODE_64BIT:
11675 {
11676 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
11677 IEMOP_HLP_NO_LOCK_PREFIX();
11678
11679 IEM_MC_BEGIN(0, 1);
11680 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
11681 IEM_MC_STORE_GREG_U64(iReg, u64Value);
11682 IEM_MC_ADVANCE_RIP();
11683 IEM_MC_END();
11684 break;
11685 }
11686 }
11687
11688 return VINF_SUCCESS;
11689}
11690
11691
11692/** Opcode 0xb8. */
11693FNIEMOP_DEF(iemOp_eAX_Iv)
11694{
11695 IEMOP_MNEMONIC("mov rAX,IV");
11696 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
11697}
11698
11699
11700/** Opcode 0xb9. */
11701FNIEMOP_DEF(iemOp_eCX_Iv)
11702{
11703 IEMOP_MNEMONIC("mov rCX,IV");
11704 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
11705}
11706
11707
11708/** Opcode 0xba. */
11709FNIEMOP_DEF(iemOp_eDX_Iv)
11710{
11711 IEMOP_MNEMONIC("mov rDX,IV");
11712 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
11713}
11714
11715
11716/** Opcode 0xbb. */
11717FNIEMOP_DEF(iemOp_eBX_Iv)
11718{
11719 IEMOP_MNEMONIC("mov rBX,IV");
11720 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
11721}
11722
11723
11724/** Opcode 0xbc. */
11725FNIEMOP_DEF(iemOp_eSP_Iv)
11726{
11727 IEMOP_MNEMONIC("mov rSP,IV");
11728 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
11729}
11730
11731
11732/** Opcode 0xbd. */
11733FNIEMOP_DEF(iemOp_eBP_Iv)
11734{
11735 IEMOP_MNEMONIC("mov rBP,IV");
11736 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
11737}
11738
11739
11740/** Opcode 0xbe. */
11741FNIEMOP_DEF(iemOp_eSI_Iv)
11742{
11743 IEMOP_MNEMONIC("mov rSI,IV");
11744 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
11745}
11746
11747
11748/** Opcode 0xbf. */
11749FNIEMOP_DEF(iemOp_eDI_Iv)
11750{
11751 IEMOP_MNEMONIC("mov rDI,IV");
11752 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
11753}
11754
11755
/** Opcode 0xc0. */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    /* Group 2 shift/rotate Eb,Ib: the ModR/M reg field selects the operation. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are undefined for (multi-bit) shifts/rotates on real hardware. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,            0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags,           2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The trailing '1' tells the address calc one immediate byte follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11814
11815
/** Opcode 0xc1. */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    /* Group 2 shift/rotate Ev,Ib: the ModR/M reg field selects the operation,
       the effective operand size picks the 16/32/64-bit worker. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are undefined for (multi-bit) shifts/rotates on real hardware. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* Writing a 32-bit register clears bits 63:32; the worker only
                   touched the low half via the reference, so do it explicitly. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The trailing '1' tells the address calc one immediate byte follows. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11952
11953
/** Opcode 0xc2. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    /* RET imm16 (near): pop the return address and release imm16 extra stack bytes. */
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Near branches default to 64-bit operand size in long mode. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
11963
11964
/** Opcode 0xc3. */
FNIEMOP_DEF(iemOp_retn)
{
    /* RET (near, no stack adjustment) - shares the CIMPL worker with 0xc2 using cbPop=0. */
    IEMOP_MNEMONIC("retn");
    /* Near branches default to 64-bit operand size in long mode. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
11973
11974
/** Opcode 0xc4.
 * LES Gv,Mp in legacy/compat mode with a memory operand; otherwise the
 * 2-byte VEX prefix (not implemented yet, raises \#UD for now). */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid 64-bit mode. In legacy and
           compatability mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
11995
11996
/** Opcode 0xc5. LDS in legacy/compat mode; 3-byte VEX prefix otherwise. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC("lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    /* VEX form: consume the remaining prefix bytes and the actual opcode,
       then reject since VEX decoding isn't implemented yet. */
    IEMOP_MNEMONIC("3-byte-vex");
    /** @todo Test when exactly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
12034
12035
/** Opcode 0xc6. Group 11: only /0 (mov Eb,Ib) is defined. */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access: effective address is calculated before the
           immediate byte is fetched (1 = remaining opcode bytes). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12067
12068
/** Opcode 0xc7. Group 11: only /0 (mov Ev,Iz) is defined. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                /* 64-bit operand size still takes a 32-bit immediate, sign extended. */
                IEM_MC_BEGIN(0, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access: the second argument to IEM_MC_CALC_RM_EFF_ADDR is
           the number of immediate bytes still to be fetched (2 or 4). */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12149
12150
12151
12152
/** Opcode 0xc8. ENTER: create a stack frame (frame size Iw, nesting level Ib). */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* operand size defaults to 64-bit in long mode */
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    /* The frame construction is done in the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
12163
12164
12165/** Opcode 0xc9. */
12166FNIEMOP_DEF(iemOp_leave)
12167{
12168 IEMOP_MNEMONIC("retn");
12169 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12170 IEMOP_HLP_NO_LOCK_PREFIX();
12171 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12172}
12173
12174
/** Opcode 0xca. Far return with a 16-bit immediate stack adjustment. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* operand size defaults to 64-bit in long mode */
    /* CS/RIP/stack handling is done in the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
12184
12185
/** Opcode 0xcb. Far return without stack adjustment. */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* operand size defaults to 64-bit in long mode */
    /* Same C implementation as retf Iw, just with zero extra bytes to pop. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
12194
12195
/** Opcode 0xcc. INT3 breakpoint: raises \#BP, flagged as the breakpoint instruction. */
FNIEMOP_DEF(iemOp_int_3)
{
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
12201
12202
/** Opcode 0xcd. INT Ib: software interrupt with the vector from the immediate byte. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
12209
12210
/** Opcode 0xce. INTO: software interrupt on overflow; invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC("into");
    IEMOP_HLP_NO_64BIT();

    /* Defers to the common INT implementation with the \#OF vector; the
       OF-flag conditionality is presumably handled by iemCImpl_int —
       NOTE(review): confirm against the C implementation. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false,       1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
12224
12225
/** Opcode 0xcf. IRET: interrupt return, fully handled in the C implementation. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
12233
12234
/** Opcode 0xd0. Group 2: rotate/shift Eb by 1; /r selects the operation. */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map read-write, shift in place, commit. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12290
12291
12292
/** Opcode 0xd1. Group 2: rotate/shift Ev by 1; /r selects the operation. */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes clear the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map read-write, shift in place, commit. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12422
12423
/** Opcode 0xd2. Group 2: rotate/shift Eb by CL; /r selects the operation. */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register: the shift count is read from CL at execution time. */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map read-write, shift in place, commit. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12481
12482
/** Opcode 0xd3. Group 2: rotate/shift Ev by CL; /r selects the operation. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register: the shift count is read from CL at execution time. */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes clear the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map read-write, shift in place, commit. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12618
/** Opcode 0xd4. AAM Ib: ASCII adjust after multiply; invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm) /* AAM divides by the immediate, so zero raises \#DE up front */
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
12630
12631
/** Opcode 0xd5. AAD Ib: ASCII adjust before division; invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
12641
12642
12643/** Opcode 0xd6. */
12644FNIEMOP_DEF(iemOp_salc)
12645{
12646 IEMOP_MNEMONIC("salc");
12647 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12649 IEMOP_HLP_NO_64BIT();
12650
12651 IEM_MC_BEGIN(0, 0);
12652 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
12653 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
12654 } IEM_MC_ELSE() {
12655 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
12656 } IEM_MC_ENDIF();
12657 IEM_MC_ADVANCE_RIP();
12658 IEM_MC_END();
12659 return VINF_SUCCESS;
12660}
12661
12662
/** Opcode 0xd7. XLAT: AL = [xBX + AL], one variant per effective address size. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            /* Zero-extend AL and add (with wrap at the address width) BX. */
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12709
12710
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * @param   bRm         The ModRM byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Both ST0 and STn must be occupied; otherwise signal stack underflow
       with ST0 (register 0) as the destination. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12741
12742
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * @param   bRm         The ModRM byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* No destination register: on underflow pass UINT8_MAX instead of a
       register number, and only the FSW is updated on success. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12773
12774
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * @param   bRm         The ModRM byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Same as iemOpHlpFpuNoStore_st0_stN, but the stack is popped both on
       success and on underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12805
12806
/** Opcode 0xd8 11/0. fadd st0,stN: result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
12813
12814
/** Opcode 0xd8 11/1. fmul st0,stN: result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
12821
12822
/** Opcode 0xd8 11/2. fcom st0,stN: compare only, FSW flags updated. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
12829
12830
/** Opcode 0xd8 11/3. fcomp st0,stN: like fcom but pops the stack afterwards
 *  (same assembly worker, different helper). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
12837
12838
/** Opcode 0xd8 11/4. fsub st0,stN: result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
12845
12846
/** Opcode 0xd8 11/5. fsubr st0,stN: reversed subtraction, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
12853
12854
/** Opcode 0xd8 11/6. fdiv st0,stN: result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
12861
12862
/** Opcode 0xd8 11/7. fdivr st0,stN: reversed division, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
12869
12870
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModRM byte; encodes the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* The 32-bit source operand is read from memory before checking ST0. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12906
12907
/** Opcode 0xd8 !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    /* ST0 = ST0 + m32real. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
12914
12915
/** Opcode 0xd8 !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    /* ST0 = ST0 * m32real. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
12922
12923
/** Opcode 0xd8 !11/2.
 * Compares ST0 with an m32real operand; updates FSW only, no value stored. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Memory-operand variant so FPUDP/FPUDS get recorded with the new FSW. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12956
12957
/** Opcode 0xd8 !11/3.
 * Like iemOp_fcom_m32r but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12990
12991
/** Opcode 0xd8 !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    /* ST0 = ST0 - m32real. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
12998
12999
/** Opcode 0xd8 !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    /* Reversed subtract: ST0 = m32real - ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
13006
13007
/** Opcode 0xd8 !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    /* ST0 = ST0 / m32real. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
13014
13015
/** Opcode 0xd8 !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    /* Reversed divide: ST0 = m32real / ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13022
13023
/** Opcode 0xd8.
 * First x87 escape byte: dispatches on the ModR/M reg field, register forms
 * (mod == 3) operate on ST0/ST(i), memory forms on ST0 and an m32real. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Record the FPU opcode offset (opcode byte is the one just consumed). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13061
13062
/** Opcode 0xd9 /0 mem32real
 * Loads an m32real, converts it to 80-bit, and pushes it onto the FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push needs ST7 (the register below the current top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13095
13096
/** Opcode 0xd9 !11/2 mem32real
 * Stores ST0 to an m32real destination.  On an empty ST0, writes a negative
 * QNaN if the invalid-operation exception is masked (FCW.IM). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the conversion produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13131
13132
/** Opcode 0xd9 !11/3
 * Like iemOp_fst_m32r, but pops the register stack after a successful store
 * (and on the masked-underflow path). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13167
13168
/** Opcode 0xd9 !11/4
 * Loads the FPU environment (14 or 28 bytes depending on operand size);
 * the heavy lifting is deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13185
13186
13187/** Opcode 0xd9 !11/5 */
13188FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13189{
13190 IEMOP_MNEMONIC("fldcw m2byte");
13191 IEM_MC_BEGIN(1, 1);
13192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13193 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13196 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13197 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
13198 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
13199 IEM_MC_END();
13200 return VINF_SUCCESS;
13201}
13202
13203
13204/** Opcode 0xd9 !11/6 */
13205FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
13206{
13207 IEMOP_MNEMONIC("fstenv m14/m28byte");
13208 IEM_MC_BEGIN(3, 0);
13209 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
13210 IEM_MC_ARG(uint8_t, iEffSeg, 1);
13211 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
13212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13214 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13215 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
13216 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
13217 IEM_MC_END();
13218 return VINF_SUCCESS;
13219}
13220
13221
/** Opcode 0xd9 !11/7
 * Stores the 16-bit FPU control word to memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13238
13239
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FPU no-operation; still raises \#NM/\#MF checks and updates FOP/FPUIP. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13257
13258
/** Opcode 0xd9 11/0 stN
 * Pushes a copy of ST(i) onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        /* FSW of 0 in the result: the push itself sets TOP/flags as needed. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13284
13285
/** Opcode 0xd9 11/3 stN
 * Exchanges ST0 with ST(i); the underflow case is handled by a C worker. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: old ST(i) -> ST0 (via result, C1 set), old ST0 -> ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13314
13315
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * Copies ST0 to ST(i) and pops the register stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: no copy needed, just pop (or underflow). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13358
13359
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Raises the usual \#NM/\#MF checks first; takes the stack-underflow path when
 * ST0 is empty instead of calling the worker.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13389
13390
/** Opcode 0xd9 0xe0.
 * Changes the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
13397
13398
/** Opcode 0xd9 0xe1.
 * Replaces ST0 with its absolute value. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13405
13406
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW.
 *
 * Used for examine/test style operations: the worker produces a new FSW but
 * does not modify any FPU register.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX: no destination register to flag in the underflow handling. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13435
13436
/** Opcode 0xd9 0xe4.
 * Compares ST0 with 0.0; only the FSW condition codes are updated. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
13443
13444
/** Opcode 0xd9 0xe5.
 * Examines/classifies ST0; only the FSW condition codes are updated. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
13451
13452
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * The worker only produces the value; the push (and TOP adjustment) is done
 * here.  If ST7 (the register below TOP) is occupied the push-overflow path
 * is taken instead.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13480
13481
/** Opcode 0xd9 0xe8.
 * Pushes +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
13488
13489
/** Opcode 0xd9 0xe9.
 * Pushes log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
13496
13497
/** Opcode 0xd9 0xea.
 * Pushes log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
13504
/** Opcode 0xd9 0xeb.
 * Pushes pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
13511
13512
/** Opcode 0xd9 0xec.
 * Pushes log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
13519
/** Opcode 0xd9 0xed.
 * Pushes ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
13526
13527
/** Opcode 0xd9 0xee.
 * Pushes +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
13534
13535
/** Opcode 0xd9 0xf0.
 * ST0 = 2^ST0 - 1 (unary, replaces ST0). */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
13542
13543
13544/** Opcode 0xd9 0xf1. */
13545FNIEMOP_DEF(iemOp_fylx2)
13546{
13547 IEMOP_MNEMONIC("fylx2 st0");
13548 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
13549}
13550
13551
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * Underflow (empty ST0) takes the dedicated push-underflow-two path.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13581
13582
/** Opcode 0xd9 0xf2.
 * Partial tangent: replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
13589
13590
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte (register form; low bits select ST(i)).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Note the operand order: ST(i) is the destination (first operand). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13622
13623
/** Opcode 0xd9 0xf3.
 * Result goes to ST1, then the stack is popped. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
13630
13631
/** Opcode 0xd9 0xf4.
 * Two results: one replaces ST0, the other is pushed. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
13638
13639
/** Opcode 0xd9 0xf5.
 * IEEE partial remainder; result replaces ST0, no pop. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
13646
13647
/** Opcode 0xd9 0xf6.
 * Decrements the FPU stack TOP pointer; no register contents change. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13670
13671
/** Opcode 0xd9 0xf7.
 * Increments the FPU stack TOP pointer; no register contents change. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13694
13695
/** Opcode 0xd9 0xf8.
 * Partial remainder (truncating); result replaces ST0, no pop. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
13702
13703
/** Opcode 0xd9 0xf9.
 * Result goes to ST1, then the stack is popped. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
13710
13711
/** Opcode 0xd9 0xfa.
 * Square root; result replaces ST0. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
13718
13719
/** Opcode 0xd9 0xfb.
 * Two results: one replaces ST0, the other is pushed. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
13726
13727
/** Opcode 0xd9 0xfc.
 * Rounds ST0 to an integer per the FCW rounding mode; replaces ST0. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
13734
13735
/** Opcode 0xd9 0xfd.
 * Scales ST0 by ST1; result replaces ST0, no pop. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
13742
13743
/** Opcode 0xd9 0xfe.
 * Sine; result replaces ST0. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
13750
13751
/** Opcode 0xd9 0xff.
 * Cosine; result replaces ST0. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
13758
13759
/** Used by iemOp_EscF1.
 * Handlers for the 0xd9 register forms with ModR/M bytes 0xe0..0xff;
 * indexed by (bRm - 0xe0). */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fylx2,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
13796
13797
/** Opcode 0xd9.
 * Second x87 escape byte: register forms cover FLD/FXCH/FNOP/FSTP and the
 * 0xe0-0xff table; memory forms cover m32real loads/stores and environment
 * and control-word accesses. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Record the FPU opcode offset (opcode byte is the one just consumed). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only 0xd0 (FNOP) is defined in this column. */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* 0xe0..0xff are dispatched via the table. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13839
13840
/** Opcode 0xda 11/0.
 * FCMOVB: copies ST(i) to ST0 if CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be non-empty, even when the move isn't taken. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13867
13868
/** Opcode 0xda 11/1.
 * FCMOVE: copies ST(i) to ST0 if ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13895
13896
/** Opcode 0xda 11/2.
 * FCMOVBE: copies ST(i) to ST0 if CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13923
13924
/**
 * Opcode 0xda 11/3 - FCMOVU ST(0),ST(i).
 *
 * Copies ST(i) into ST(0) when EFLAGS.PF is set (the "unordered" condition).
 * If either register is empty the FPU stack-underflow path is taken instead.
 */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13951
13952
13953/**
13954 * Common worker for FPU instructions working on ST0 and STn, only affecting
13955 * flags, and popping twice when done.
13956 *
13957 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13958 */
13959FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13960{
13961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13962
13963 IEM_MC_BEGIN(3, 1);
13964 IEM_MC_LOCAL(uint16_t, u16Fsw);
13965 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13966 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13967 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13968
13969 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13970 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13971 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
13972 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13973 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
13974 IEM_MC_ELSE()
13975 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
13976 IEM_MC_ENDIF();
13977 IEM_MC_USED_FPU();
13978 IEM_MC_ADVANCE_RIP();
13979
13980 IEM_MC_END();
13981 return VINF_SUCCESS;
13982}
13983
13984
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare ST(0) with ST(1), pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
13991
13992
13993/**
13994 * Common worker for FPU instructions working on ST0 and an m32i, and storing
13995 * the result in ST0.
13996 *
13997 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13998 */
13999FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
14000{
14001 IEM_MC_BEGIN(3, 3);
14002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14003 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14004 IEM_MC_LOCAL(int32_t, i32Val2);
14005 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14006 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14007 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14008
14009 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14011
14012 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14013 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14014 IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
14015
14016 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14017 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
14018 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14019 IEM_MC_ELSE()
14020 IEM_MC_FPU_STACK_UNDERFLOW(0);
14021 IEM_MC_ENDIF();
14022 IEM_MC_USED_FPU();
14023 IEM_MC_ADVANCE_RIP();
14024
14025 IEM_MC_END();
14026 return VINF_SUCCESS;
14027}
14028
14029
/** Opcode 0xda !11/0 - FIADD m32i: ST(0) = ST(0) + m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14036
14037
/** Opcode 0xda !11/1 - FIMUL m32i: ST(0) = ST(0) * m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14044
14045
/**
 * Opcode 0xda !11/2 - FICOM ST(0),m32i.
 *
 * Compares ST(0) with a 32-bit signed integer memory operand, updating only
 * FSW (no pop).  On stack underflow the FSW memory-operand underflow path is
 * taken with UINT8_MAX indicating no destination register.
 */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14078
14079
/**
 * Opcode 0xda !11/3 - FICOMP ST(0),m32i.
 *
 * Same comparison as FICOM (shares iemAImpl_ficom_r80_by_i32) but pops the
 * register stack afterwards via the THEN_POP FSW/underflow variants.
 */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14112
14113
/** Opcode 0xda !11/4 - FISUB m32i: ST(0) = ST(0) - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14120
14121
/** Opcode 0xda !11/5 - FISUBR m32i: ST(0) = m32i - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14128
14129
/** Opcode 0xda !11/6 - FIDIV m32i: ST(0) = ST(0) / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14136
14137
/** Opcode 0xda !11/7 - FIDIVR m32i: ST(0) = m32i / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14144
14145
/**
 * Opcode 0xda - x87 escape group 2.
 *
 * Dispatches on the ModR/M byte: register forms (mod == 3) are the FCMOVcc
 * family plus FUCOMPP (0xe9); memory forms are the m32i integer arithmetic
 * and compare instructions.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Record the FPU opcode offset (the 0xda byte) for FOP tracking. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only 0xe9 (FUCOMPP) is defined in /5; everything else is #UD. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14185
14186
/**
 * Opcode 0xdb !11/0 - FILD m32i.
 *
 * Converts a 32-bit signed integer memory operand to 80-bit real and pushes
 * it onto the FPU stack.  ST(7) must be empty (the incoming top-1 slot);
 * otherwise the push-overflow path is taken.
 */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14218
14219
/**
 * Opcode 0xdb !11/1 - FISTTP m32i (SSE3).
 *
 * Stores ST(0) to memory as a 32-bit signed integer using truncation
 * (iemAImpl_fistt_r80_to_i32), then pops.  On stack underflow, if FCW.IM is
 * set the integer-indefinite value is written instead, matching hardware.
 */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults hit before FPU state changes. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14254
14255
/**
 * Opcode 0xdb !11/2 - FIST m32i.
 *
 * Stores ST(0) to memory as a 32-bit signed integer using the current
 * rounding mode (iemAImpl_fist_r80_to_i32); no pop.  On stack underflow with
 * FCW.IM set, writes the integer-indefinite value.
 */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14290
14291
14292/** Opcode 0xdb !11/3. */
14293FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14294{
14295 IEMOP_MNEMONIC("fisttp m32i");
14296 IEM_MC_BEGIN(3, 2);
14297 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14298 IEM_MC_LOCAL(uint16_t, u16Fsw);
14299 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14300 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14301 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14302
14303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14305 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14306 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14307
14308 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14309 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14310 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14311 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14312 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14313 IEM_MC_ELSE()
14314 IEM_MC_IF_FCW_IM()
14315 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14316 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14317 IEM_MC_ENDIF();
14318 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14319 IEM_MC_ENDIF();
14320 IEM_MC_USED_FPU();
14321 IEM_MC_ADVANCE_RIP();
14322
14323 IEM_MC_END();
14324 return VINF_SUCCESS;
14325}
14326
14327
/**
 * Opcode 0xdb !11/5 - FLD m80r.
 *
 * Loads an 80-bit real from memory and pushes it onto the FPU stack.
 * ST(7) must be empty, otherwise the push-overflow path is taken.
 */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14359
14360
/**
 * Opcode 0xdb !11/7 - FSTP m80r.
 *
 * Stores ST(0) to memory as an 80-bit real, then pops.  On stack underflow
 * with FCW.IM set, a negative QNaN is written to the destination.
 */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14395
14396
/**
 * Opcode 0xdb 11/0 - FCMOVNB ST(0),ST(i).
 *
 * Copies ST(i) into ST(0) when EFLAGS.CF is clear.  If either register is
 * empty the FPU stack-underflow path is taken instead.
 */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14423
14424
/**
 * Opcode 0xdb 11/1 - FCMOVNE ST(0),ST(i).
 *
 * Copies ST(i) into ST(0) when EFLAGS.ZF is clear.  If either register is
 * empty the FPU stack-underflow path is taken instead.
 */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14451
14452
/**
 * Opcode 0xdb 11/2 - FCMOVNBE ST(0),ST(i).
 *
 * Copies ST(i) into ST(0) when both CF and ZF are clear (not-below-or-equal).
 * If either register is empty the FPU stack-underflow path is taken instead.
 */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14479
14480
/**
 * Opcode 0xdb 11/3 - FCMOVNU ST(0),ST(i).
 *
 * Copies ST(i) into ST(0) when EFLAGS.PF is clear (not-unordered).  If either
 * register is empty the FPU stack-underflow path is taken instead.
 *
 * NOTE(review): the identifier/mnemonic string say "fcmovnnu" (double 'n');
 * the architectural name is FCMOVNU.  Looks like a typo — confirm before
 * renaming, since the mnemonic string may feed stats/logging keys.
 */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14507
14508
/**
 * Opcode 0xdb 0xe0 - FNENI.
 *
 * 8087-only interrupt-enable instruction; treated as a no-op here apart from
 * the device-not-available check.
 */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14520
14521
/**
 * Opcode 0xdb 0xe1 - FNDISI.
 *
 * 8087-only interrupt-disable instruction; treated as a no-op here apart from
 * the device-not-available check.
 */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14533
14534
/**
 * Opcode 0xdb 0xe2 - FNCLEX.
 *
 * Clears the FPU exception bits in FSW without checking for pending
 * exceptions first (the no-wait form).
 */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14548
14549
/**
 * Opcode 0xdb 0xe3 - FNINIT.
 *
 * Reinitializes the FPU; deferred to the C implementation with
 * fCheckXcpts=false since this is the no-wait form.
 */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
14557
14558
/**
 * Opcode 0xdb 0xe4 - FNSETPM.
 *
 * 80287-only "set protected mode" instruction; ignored (no-op) on later FPUs,
 * which is what is emulated here.
 */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14570
14571
/**
 * Opcode 0xdb 0xe5 - FRSTPM.
 *
 * 80287XL-only "return to real mode" instruction.  The no-op emulation is
 * compiled out because newer CPUs raise \#UD for this encoding, which is the
 * behavior implemented below.
 */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
14587
14588
/** Opcode 0xdb 11/5 - FUCOMI ST(0),ST(i): unordered compare into EFLAGS, no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
14595
14596
/** Opcode 0xdb 11/6 - FCOMI ST(0),ST(i): ordered compare into EFLAGS, no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
14603
14604
/**
 * Opcode 0xdb - x87 escape group 3.
 *
 * Register forms (mod == 3): FCMOVNcc, the 0xdb /4 control opcodes
 * (FNENI/FNDISI/FNCLEX/FNINIT/FNSETPM/FRSTPM), FUCOMI and FCOMI.
 * Memory forms: FILD/FISTTP/FIST/FISTP m32i and FLD/FSTP m80r.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* Record the FPU opcode offset (the 0xdb byte) for FOP tracking. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* unreachable: every inner case returns */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14654
14655
14656/**
14657 * Common worker for FPU instructions working on STn and ST0, and storing the
14658 * result in STn unless IE, DE or ZE was raised.
14659 *
14660 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14661 */
14662FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14663{
14664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14665
14666 IEM_MC_BEGIN(3, 1);
14667 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14668 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14669 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14670 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14671
14672 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14673 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14674
14675 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
14676 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14677 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
14678 IEM_MC_ELSE()
14679 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
14680 IEM_MC_ENDIF();
14681 IEM_MC_USED_FPU();
14682 IEM_MC_ADVANCE_RIP();
14683
14684 IEM_MC_END();
14685 return VINF_SUCCESS;
14686}
14687
14688
/** Opcode 0xdc 11/0 - FADD ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
14695
14696
/** Opcode 0xdc 11/1 - FMUL ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
14703
14704
/** Opcode 0xdc 11/4 - FSUBR ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
14711
14712
/** Opcode 0xdc 11/5 - FSUB ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
14719
14720
/** Opcode 0xdc 11/6 - FDIVR ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
14727
14728
/** Opcode 0xdc 11/7 - FDIV ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
14735
14736
14737/**
14738 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
14739 * memory operand, and storing the result in ST0.
14740 *
14741 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14742 */
14743FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
14744{
14745 IEM_MC_BEGIN(3, 3);
14746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14747 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14748 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
14749 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14750 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
14751 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
14752
14753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14755 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14756 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14757
14758 IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
14759 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
14760 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
14761 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
14762 IEM_MC_ELSE()
14763 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
14764 IEM_MC_ENDIF();
14765 IEM_MC_USED_FPU();
14766 IEM_MC_ADVANCE_RIP();
14767
14768 IEM_MC_END();
14769 return VINF_SUCCESS;
14770}
14771
14772
/** Opcode 0xdc !11/0 - FADD m64r: ST(0) = ST(0) + m64r. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
14779
14780
/** Opcode 0xdc !11/1 - FMUL m64r: ST(0) = ST(0) * m64r. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
14787
14788
/**
 * Opcode 0xdc !11/2 - FCOM ST(0),m64r.
 *
 * Compares ST(0) with a 64-bit real memory operand, updating only FSW
 * (no pop).
 */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14821
14822
/**
 * Opcode 0xdc !11/3 - FCOMP ST(0),m64r.
 *
 * Same comparison as FCOM m64r (shares iemAImpl_fcom_r80_by_r64) but pops the
 * register stack afterwards via the THEN_POP variants.
 */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14855
14856
/** Opcode 0xdc !11/4 - FSUB m64r: ST(0) = ST(0) - m64r. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
14863
14864
/** Opcode 0xdc !11/5 - FSUBR m64r: ST(0) = m64r - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
14871
14872
/** Opcode 0xdc !11/6.
 * FDIV ST0,m64r: ST(0) = ST(0) / m64r. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
14879
14880
/** Opcode 0xdc !11/7.
 * FDIVR ST0,m64r: ST(0) = m64r / ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
14887
14888
/** Opcode 0xdc.
 * Escape opcode 0xdc dispatcher: register forms (mod=11) operate on
 * ST(i) with ST(0), memory forms work on a 64-bit real operand. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Remember the offset of the escape byte for FPU opcode (FOP) updates. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14925
14926
/** Opcode 0xdd !11/0.
 * FLD m64r: convert the 64-bit real memory operand to 80-bit and push it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* ST(7) must be free to push; otherwise signal stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14958
14959
/** Opcode 0xdd !11/1.
 * FISTTP m64i (SSE3): store ST(0) to memory as a 64-bit integer using
 * truncation regardless of the rounding control, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; commit is conditional on the FSW result. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store the integer-indefinite value if IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14994
14995
/** Opcode 0xdd !11/2.
 * FST m64r: store ST(0) to memory as a 64-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store negative QNaN (real indefinite) if IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15030
15031
15032
15033
/** Opcode 0xdd !11/3.
 * FSTP m64r: store ST(0) to memory as a 64-bit real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store negative QNaN (real indefinite) if IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15068
15069
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte: restore the complete FPU state from memory;
 * the heavy lifting is deferred to the iemCImpl_frstor C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15086
15087
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte: save the complete FPU state to memory and reinitialize;
 * deferred to the iemCImpl_fnsave C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15105
/** Opcode 0xdd !11/7.
 * FNSTSW m16: store the FPU status word to a 16-bit memory operand
 * (no pending-exception check). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15129
15130
/** Opcode 0xdd 11/0.
 * FFREE ST(i): mark the register as empty in the FPU tag word. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15152
15153
/** Opcode 0xdd 11/2.
 * FST ST(i): copy ST(0) into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15176
15177
15178/** Opcode 0xdd 11/3. */
15179FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15180{
15181 IEMOP_MNEMONIC("fcom st0,stN");
15182 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15183}
15184
15185
15186/** Opcode 0xdd 11/4. */
15187FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15188{
15189 IEMOP_MNEMONIC("fcomp st0,stN");
15190 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15191}
15192
15193
/** Opcode 0xdd.
 * Escape opcode 0xdd dispatcher: register forms handle FFREE/FST/FSTP/FUCOM(P),
 * memory forms handle 64-bit real loads/stores, FRSTOR, FNSAVE and FNSTSW. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Remember the offset of the escape byte for FPU opcode (FOP) updates. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15230
15231
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST0: ST(i) = ST(i) + ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
15238
15239
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST0: ST(i) = ST(i) * ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15246
15247
15248/** Opcode 0xde 0xd9. */
15249FNIEMOP_DEF(iemOp_fcompp)
15250{
15251 IEMOP_MNEMONIC("fucompp st0,stN");
15252 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15253}
15254
15255
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST0: ST(i) = ST(0) - ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
15262
15263
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST0: ST(i) = ST(i) - ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
15270
15271
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST0: ST(i) = ST(0) / ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
15278
15279
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST0: ST(i) = ST(i) / ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15286
15287
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Operate on ST(0) when valid; otherwise raise FPU stack underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15323
15324
/** Opcode 0xde !11/0.
 * FIADD m16i: ST(0) = ST(0) + (signed 16-bit integer operand). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
15331
15332
/** Opcode 0xde !11/1.
 * FIMUL m16i: ST(0) = ST(0) * (signed 16-bit integer operand). */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15339
15340
/** Opcode 0xde !11/2.
 * FICOM ST0,m16i: compare ST(0) with a signed 16-bit integer memory operand,
 * setting C0/C2/C3 in FSW (no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15373
15374
/** Opcode 0xde !11/3.
 * FICOMP ST0,m16i: like FICOM but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15407
15408
/** Opcode 0xde !11/4.
 * FISUB m16i: ST(0) = ST(0) - (signed 16-bit integer operand). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
15415
15416
/** Opcode 0xde !11/5.
 * FISUBR m16i: ST(0) = (signed 16-bit integer operand) - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15423
15424
15425/** Opcode 0xde !11/6. */
15426FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15427{
15428 IEMOP_MNEMONIC("fiadd m16i");
15429 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
15430}
15431
15432
15433/** Opcode 0xde !11/7. */
15434FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
15435{
15436 IEMOP_MNEMONIC("fiadd m16i");
15437 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
15438}
15439
15440
/** Opcode 0xde.
 * Escape opcode 0xde dispatcher: register forms are the popping arithmetic
 * ops (FADDP etc.) plus FCOMPP at 0xd9; memory forms work on 16-bit ints. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Remember the offset of the escape byte for FPU opcode (FOP) updates. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15479
15480
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like FFREE + FINCSTP
 * (frees ST(i), then increments the stack top pointer). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15502
15503
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: store the FPU status word into AX (no pending-exception check). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15519
15520
15521/** Opcode 0xdf 11/5. */
15522FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
15523{
15524 IEMOP_MNEMONIC("fcomip st0,stN");
15525 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
15526}
15527
15528
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i): ordered compare into EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
15535
15536
/** Opcode 0xdf !11/0.
 * FILD m16i: convert a signed 16-bit integer memory operand to 80-bit real
 * and push it onto the register stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST(7) must be free to push; otherwise signal stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15568
15569
/** Opcode 0xdf !11/1.
 * FISTTP m16i (SSE3): store ST(0) as a 16-bit integer using truncation
 * regardless of the rounding control, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store the integer-indefinite value if IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15604
15605
15606/** Opcode 0xdf !11/2. */
15607FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
15608{
15609 IEMOP_MNEMONIC("fistp m16i");
15610 IEM_MC_BEGIN(3, 2);
15611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15612 IEM_MC_LOCAL(uint16_t, u16Fsw);
15613 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15614 IEM_MC_ARG(int16_t *, pi16Dst, 1);
15615 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15616
15617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15619 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15620 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15621
15622 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
15623 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15624 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
15625 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
15626 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
15627 IEM_MC_ELSE()
15628 IEM_MC_IF_FCW_IM()
15629 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
15630 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
15631 IEM_MC_ENDIF();
15632 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
15633 IEM_MC_ENDIF();
15634 IEM_MC_USED_FPU();
15635 IEM_MC_ADVANCE_RIP();
15636
15637 IEM_MC_END();
15638 return VINF_SUCCESS;
15639}
15640
15641
/** Opcode 0xdf !11/3.
 * FISTP m16i: store ST(0) as a signed 16-bit integer, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store the integer-indefinite value if IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15676
15677
/** Opcode 0xdf !11/4. FBLD m80bcd - not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
15680
15681
/** Opcode 0xdf !11/5.
 * FILD m64i: convert a signed 64-bit integer memory operand to 80-bit real
 * and push it onto the register stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST(7) must be free to push; otherwise signal stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15713
15714
/** Opcode 0xdf !11/6. FBSTP m80bcd - not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
15717
15718
/** Opcode 0xdf !11/7.
 * FISTP m64i: store ST(0) as a signed 64-bit integer, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store the integer-indefinite value if IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15753
15754
/**
 * Opcode 0xdf - x87 escape byte DF.
 *
 * Dispatches on the ModR/M byte fetched here: register forms (mod == 3) go
 * through the first switch, memory forms through the second, both keyed on
 * the ModR/M reg field (bits 5:3).
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* only DF E0 (fnstsw ax) is valid in this slot */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15792
15793
/**
 * Opcode 0xe0 - LOOPNE/LOOPNZ Jb.
 *
 * Decrements the counter register selected by the effective ADDRESS size
 * (CX/ECX/RCX) and takes the signed 8-bit relative jump when the counter
 * is non-zero AND ZF is clear; otherwise falls through to the next
 * instruction.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix, not the operand size, picks the counter width. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15840
15841
/**
 * Opcode 0xe1 - LOOPE/LOOPZ Jb.
 *
 * Decrements the counter register selected by the effective ADDRESS size
 * (CX/ECX/RCX) and takes the signed 8-bit relative jump when the counter
 * is non-zero AND ZF is set; otherwise falls through.  Mirror image of
 * iemOp_loopne_Jb apart from the ZF polarity.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix, not the operand size, picks the counter width. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15888
15889
/**
 * Opcode 0xe2 - LOOP Jb.
 *
 * Decrements the counter register selected by the effective ADDRESS size
 * (CX/ECX/RCX) and takes the signed 8-bit relative jump while the counter
 * is non-zero.
 *
 * Decode-time fast path: when the displacement equals minus the number of
 * opcode bytes consumed so far (-(int8_t)offOpcode == i8Imm), the branch
 * target is this very instruction, i.e. 'loop $' spinning the counter down
 * to zero.  Instead of iterating, the counter is stored as 0 directly and
 * RIP advanced - same architectural end state without the spin.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm) /* not a jump-to-self */
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $': skip the countdown, just zero the counter. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm) /* not a jump-to-self */
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $': skip the countdown, just zero the counter. */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm) /* not a jump-to-self */
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $': skip the countdown, just zero the counter. */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15963
15964
/**
 * Opcode 0xe3 - JCXZ/JECXZ/JRCXZ Jb.
 *
 * Takes the signed 8-bit relative jump when the counter register selected
 * by the effective ADDRESS size (CX/ECX/RCX) is zero.  Note the inverted
 * structure: the non-zero case advances RIP, the ELSE branch jumps.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16008
16009
16010/** Opcode 0xe4 */
16011FNIEMOP_DEF(iemOp_in_AL_Ib)
16012{
16013 IEMOP_MNEMONIC("in eAX,Ib");
16014 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16015 IEMOP_HLP_NO_LOCK_PREFIX();
16016 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16017}
16018
16019
16020/** Opcode 0xe5 */
16021FNIEMOP_DEF(iemOp_in_eAX_Ib)
16022{
16023 IEMOP_MNEMONIC("in eAX,Ib");
16024 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16025 IEMOP_HLP_NO_LOCK_PREFIX();
16026 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16027}
16028
16029
/**
 * Opcode 0xe6 - OUT Ib,AL.
 *
 * Writes AL to the immediate 8-bit I/O port; deferred to the common
 * iemCImpl_out with a register width of 1 byte.
 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16038
16039
/**
 * Opcode 0xe7 - OUT Ib,eAX.
 *
 * Writes AX or EAX (2 or 4 bytes by effective operand size) to the
 * immediate 8-bit I/O port; deferred to the common iemCImpl_out.
 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16048
16049
/**
 * Opcode 0xe8 - CALL Jv (near, relative).
 *
 * Fetches the relative displacement sized by the effective operand size
 * (imm16 / imm32 / sign-extended imm32 for 64-bit) and defers to the
 * matching iemCImpl_call_rel_NN worker.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit mode: the displacement is a sign-extended imm32. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16078
16079
/**
 * Opcode 0xe9 - JMP Jv (near, relative).
 *
 * 16-bit operand size uses an imm16 displacement; 32-bit and 64-bit both
 * use an imm32 (sign-extended by IEM_MC_REL_JMP_S32 in the 64-bit case),
 * hence the shared case label.
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT: /* same imm32 encoding as 32-bit */
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16109
16110
16111/** Opcode 0xea. */
16112FNIEMOP_DEF(iemOp_jmp_Ap)
16113{
16114 IEMOP_MNEMONIC("jmp Ap");
16115 IEMOP_HLP_NO_64BIT();
16116
16117 /* Decode the far pointer address and pass it on to the far call C implementation. */
16118 uint32_t offSeg;
16119 if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
16120 IEM_OPCODE_GET_NEXT_U32(&offSeg);
16121 else
16122 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
16123 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
16124 IEMOP_HLP_NO_LOCK_PREFIX();
16125 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
16126}
16127
16128
/**
 * Opcode 0xeb - JMP Jb (near, short relative).
 *
 * Unconditional jump by a signed 8-bit displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16142
16143
/**
 * Opcode 0xec - IN AL,DX.
 *
 * Reads one byte from the I/O port in DX into AL; deferred to
 * iemCImpl_in_eAX_DX with a register width of 1 byte.
 */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16151
16152
/**
 * Opcode 0xed - IN eAX,DX.
 *
 * Reads AX or EAX worth of data (2 or 4 bytes by effective operand size)
 * from the I/O port in DX; deferred to iemCImpl_in_eAX_DX.
 *
 * NOTE(review): the function is named iemOp_eAX_DX rather than the
 * expected iemOp_in_eAX_DX; the name is referenced from the opcode table,
 * so renaming would have to be coordinated with that table.
 */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16160
16161
/**
 * Opcode 0xee - OUT DX,AL.
 *
 * Writes AL to the I/O port in DX; deferred to iemCImpl_out_DX_eAX with a
 * register width of 1 byte.
 */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16169
16170
16171/** Opcode 0xef */
16172FNIEMOP_DEF(iemOp_out_DX_eAX)
16173{
16174 IEMOP_MNEMONIC("out DX,eAX");
16175 IEMOP_HLP_NO_LOCK_PREFIX();
16176 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16177}
16178
16179
/**
 * Opcode 0xf0 - LOCK prefix.
 *
 * Records the LOCK prefix in fPrefixes and recursively decodes the next
 * opcode byte through the one-byte opcode table.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    /* Continue decoding the actual instruction. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16189
16190
/**
 * Opcode 0xf1 - INT1 / ICEBP.
 *
 * Defers to the common software-interrupt worker raising vector 1 (#DB),
 * flagged as not being the INT3/BP instruction.
 */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC("int1"); /* icebp */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16198
16199
/**
 * Opcode 0xf2 - REPNE/REPNZ prefix.
 *
 * Sets the REPNZ prefix flag (clearing any REPZ flag first - the two are
 * mutually exclusive) and recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    /* Continue decoding the actual instruction. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16211
16212
/**
 * Opcode 0xf3 - REP/REPE/REPZ prefix.
 *
 * Sets the REPZ prefix flag (clearing any REPNZ flag first - the two are
 * mutually exclusive) and recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    /* Continue decoding the actual instruction. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16224
16225
16226/** Opcode 0xf4. */
16227FNIEMOP_DEF(iemOp_hlt)
16228{
16229 IEMOP_HLP_NO_LOCK_PREFIX();
16230 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
16231}
16232
16233
/**
 * Opcode 0xf5 - CMC.
 *
 * Complements (flips) the carry flag in EFLAGS.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16245
16246
16247/**
16248 * Common implementation of 'inc/dec/not/neg Eb'.
16249 *
16250 * @param bRm The RM byte.
16251 * @param pImpl The instruction implementation.
16252 */
16253FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
16254{
16255 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16256 {
16257 /* register access */
16258 IEM_MC_BEGIN(2, 0);
16259 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
16260 IEM_MC_ARG(uint32_t *, pEFlags, 1);
16261 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16262 IEM_MC_REF_EFLAGS(pEFlags);
16263 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
16264 IEM_MC_ADVANCE_RIP();
16265 IEM_MC_END();
16266 }
16267 else
16268 {
16269 /* memory access. */
16270 IEM_MC_BEGIN(2, 2);
16271 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
16272 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16273 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16274
16275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16276 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16277 IEM_MC_FETCH_EFLAGS(EFlags);
16278 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16279 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
16280 else
16281 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
16282
16283 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
16284 IEM_MC_COMMIT_EFLAGS(EFlags);
16285 IEM_MC_ADVANCE_RIP();
16286 IEM_MC_END();
16287 }
16288 return VINF_SUCCESS;
16289}
16290
16291
16292/**
16293 * Common implementation of 'inc/dec/not/neg Ev'.
16294 *
16295 * @param bRm The RM byte.
16296 * @param pImpl The instruction implementation.
16297 */
16298FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
16299{
16300 /* Registers are handled by a common worker. */
16301 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16302 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16303
16304 /* Memory we do here. */
16305 switch (pIemCpu->enmEffOpSize)
16306 {
16307 case IEMMODE_16BIT:
16308 IEM_MC_BEGIN(2, 2);
16309 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
16310 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16311 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16312
16313 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16314 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16315 IEM_MC_FETCH_EFLAGS(EFlags);
16316 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16317 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
16318 else
16319 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
16320
16321 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
16322 IEM_MC_COMMIT_EFLAGS(EFlags);
16323 IEM_MC_ADVANCE_RIP();
16324 IEM_MC_END();
16325 return VINF_SUCCESS;
16326
16327 case IEMMODE_32BIT:
16328 IEM_MC_BEGIN(2, 2);
16329 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
16330 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16332
16333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16334 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16335 IEM_MC_FETCH_EFLAGS(EFlags);
16336 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16337 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
16338 else
16339 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
16340
16341 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
16342 IEM_MC_COMMIT_EFLAGS(EFlags);
16343 IEM_MC_ADVANCE_RIP();
16344 IEM_MC_END();
16345 return VINF_SUCCESS;
16346
16347 case IEMMODE_64BIT:
16348 IEM_MC_BEGIN(2, 2);
16349 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
16350 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16351 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16352
16353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16354 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16355 IEM_MC_FETCH_EFLAGS(EFlags);
16356 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16357 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
16358 else
16359 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
16360
16361 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
16362 IEM_MC_COMMIT_EFLAGS(EFlags);
16363 IEM_MC_ADVANCE_RIP();
16364 IEM_MC_END();
16365 return VINF_SUCCESS;
16366
16367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16368 }
16369}
16370
16371
/**
 * Opcode 0xf6 /0 - TEST Eb,Ib.
 *
 * ANDs the r/m byte with an immediate byte, updating only EFLAGS (the
 * destination is never written back, which is why the memory operand is
 * mapped read-only).  AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Effective address first (the '1' accounts for the trailing imm8),
           then the immediate - decode order matters here. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16419
16420
/**
 * Opcode 0xf7 /0 - TEST Ev,Iv.
 *
 * ANDs the r/m operand with an immediate (imm16/imm32/sign-extended imm32
 * for 64-bit), updating only EFLAGS; the destination is never written back,
 * so memory operands are mapped read-only.  AF is architecturally undefined.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first ('2' = trailing imm16 bytes), then the immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first ('4' = trailing imm32 bytes), then the immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 64-bit still uses a 4-byte immediate (sign-extended imm32). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16557
16558
/**
 * Opcode 0xf6 /4, /5, /6 and /7 - common worker for MUL/IMUL/DIV/IDIV Eb.
 *
 * Fetches the 8-bit source operand (register or memory), calls the supplied
 * assembly worker with AX as the implicit destination, and raises \#DE when
 * the worker reports failure (non-zero return, i.e. the divide cases).
 *
 * NOTE(review): IEMOP_HLP_NO_LOCK_PREFIX() is invoked at the top AND again
 * in each branch - the repeats look redundant (harmless, but one check
 * should suffice); confirm against the macro definition before cleaning up.
 *
 * @returns Strict VBox status code.
 * @param   bRm     The RM byte.
 * @param   pfnU8   The 8-bit assembly worker (mul/imul/div/idiv).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero rc from the worker means divide error. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero rc from the worker means divide error. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16613
16614
/**
 * Opcode 0xf7 /4, /5, /6 and /7 - common worker for MUL/IMUL/DIV/IDIV Ev.
 *
 * Fetches the source operand (register or memory, per effective operand
 * size), calls the size-specific assembly worker with the implicit AX:DX /
 * EAX:EDX / RAX:RDX destination pair, and raises \#DE when the worker
 * reports failure (non-zero return, i.e. the divide cases).  In the 32-bit
 * paths the upper halves of RAX/RDX are explicitly cleared after a
 * successful operation, matching 32-bit register write semantics.
 *
 * @returns Strict VBox status code.
 * @param   bRm     The RM byte.
 * @param   pImpl   The size-specific assembly workers (pfnU16/U32/U64).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes zero the upper halves of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes zero the upper halves of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16798
/**
 * Opcode 0xf6 - Group 3 byte forms.
 *
 * Dispatches on the ModR/M reg field: /0 TEST, /1 invalid, /2 NOT, /3 NEG,
 * /4-/7 MUL/IMUL/DIV/IDIV via the common mul-div worker.  The undefined
 * EFLAGS sets declared for /4-/7 differ (the divides additionally leave
 * OF/CF undefined).
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16834
16835
/** Opcode 0xf7.  Group 3, word/dword/qword operands: test/not/neg/mul/imul/div/idiv Ev. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The operation is selected by the reg field of the ModR/M byte. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
            /* /1 is not assigned in group 3. */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            /* SF, ZF, AF and PF are undefined after mul/imul. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            /* All six arithmetic flags are undefined after div/idiv. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16871
16872
/** Opcode 0xf8.  CLC - clear the carry flag. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16884
16885
/** Opcode 0xf9.  STC - set the carry flag. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16897
16898
/** Opcode 0xfa.  CLI - clear the interrupt flag (implemented in iemCImpl_cli). */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
16906
16907
/** Opcode 0xfb.  STI - set the interrupt flag (implemented in iemCImpl_sti). */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
16914
16915
/** Opcode 0xfc.  CLD - clear the direction flag. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16927
16928
/** Opcode 0xfd.  STD - set the direction flag. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16940
16941
16942/** Opcode 0xfe. */
16943FNIEMOP_DEF(iemOp_Grp4)
16944{
16945 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16946 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16947 {
16948 case 0:
16949 IEMOP_MNEMONIC("inc Ev");
16950 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
16951 case 1:
16952 IEMOP_MNEMONIC("dec Ev");
16953 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
16954 default:
16955 IEMOP_MNEMONIC("grp4-ud");
16956 return IEMOP_RAISE_INVALID_OPCODE();
16957 }
16958}
16959
16960
/**
 * Opcode 0xff /2.  CALL Ev - near indirect call through a register or a
 * memory operand, pushing the return address on the stack.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17042
/** C implementation function type for far branches (callf/jmpf), taking the
 *  target selector, the segment offset, and the effective operand size. */
typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17044
17045FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17046{
17047 /* Registers? How?? */
17048 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17049 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17050
17051 /* Far pointer loaded from memory. */
17052 switch (pIemCpu->enmEffOpSize)
17053 {
17054 case IEMMODE_16BIT:
17055 IEM_MC_BEGIN(3, 1);
17056 IEM_MC_ARG(uint16_t, u16Sel, 0);
17057 IEM_MC_ARG(uint16_t, offSeg, 1);
17058 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17060 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17062 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17063 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
17064 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17065 IEM_MC_END();
17066 return VINF_SUCCESS;
17067
17068 case IEMMODE_64BIT:
17069 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17070 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17071 * and call far qword [rsp] encodings. */
17072 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
17073 {
17074 IEM_MC_BEGIN(3, 1);
17075 IEM_MC_ARG(uint16_t, u16Sel, 0);
17076 IEM_MC_ARG(uint64_t, offSeg, 1);
17077 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17081 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17082 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
17083 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17084 IEM_MC_END();
17085 return VINF_SUCCESS;
17086 }
17087 /* AMD falls thru. */
17088
17089 case IEMMODE_32BIT:
17090 IEM_MC_BEGIN(3, 1);
17091 IEM_MC_ARG(uint16_t, u16Sel, 0);
17092 IEM_MC_ARG(uint32_t, offSeg, 1);
17093 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17094 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17095 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17097 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17098 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
17099 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17100 IEM_MC_END();
17101 return VINF_SUCCESS;
17102
17103 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17104 }
17105}
17106
17107
/**
 * Opcode 0xff /3.  CALLF Ep - far indirect call; the far pointer decoding is
 * shared with jmpf (0xff /5) via iemOpHlp_Grp5_far_Ep.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
17117
17118
/**
 * Opcode 0xff /4.  JMP Ev - near indirect jump through a register or a
 * memory operand.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpn Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17200
17201
/**
 * Opcode 0xff /5.  JMPF Ep - far indirect jump; the far pointer decoding is
 * shared with callf (0xff /3) via iemOpHlp_Grp5_far_Ep.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
17211
17212
/**
 * Opcode 0xff /6.  PUSH Ev - push a register or memory operand.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("push Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17266
17267
/** Opcode 0xff.  Group 5: inc/dec/calln/callf/jmpn/jmpf/push Ev; /7 is \#UD. */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The operation is selected by the reg field of the ModR/M byte. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_2); /* all 3-bit values handled above */
}
17296
17297
17298
/**
 * One byte opcode dispatch table, indexed by the opcode byte (0x00..0xff).
 * Group entries (iemOp_Grp1..Grp5, iemOp_Grp11) dispatch further on the
 * ModR/M reg field; prefix bytes (lock, rep, seg, op/addr size) have their
 * own entries as well.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
17366
17367
17368/** @} */
17369
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette