VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 42186

Last change on this file since 42186 was 42024, checked in by vboxsync, 12 years ago

VMM: RDTSCP support on Intel. Segregated some common CPU features from the AMD superset into Extended features as they're now available on Intel too.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 504.8 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 42024 2012-07-05 12:10:53Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2012 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Fetches the ModR/M byte itself: reg is the source register, r/m the
 * destination (register or memory).
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK only allowed with a memory destination. */

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* No locked worker implies the destination is never written (CMP, TEST),
           so the mapping can be read-only. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Fetches the ModR/M byte itself and dispatches on the effective operand
 * size; reg is the source register, r/m the destination.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK only allowed with a memory destination. */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* NOTE(review): checks pfnLockedU8, not the size-specific locked worker.
           The byte variant above uses the same test, so it appears to act as a
           per-instruction "has locked form" flag (NULL for CMP/TEST) - confirm. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
231
232
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * Reverse direction of iemOpHlpBinaryOperator_rm_r8: reg is the destination,
 * r/m the source.  No LOCK form exists since the destination is a register.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* The memory operand is only read, so no mapping/commit dance needed. */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
284
285
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * Reverse direction of iemOpHlpBinaryOperator_rm_rv: reg is the destination,
 * r/m the source.  No LOCK form exists since the destination is a register.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* The memory operand is only read, so no mapping/commit dance needed. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
411
412
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Fetches the immediate itself; the destination is fixed to AL.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK only allowed with a memory destination. */

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,        pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,    u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,       pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
437
438
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * In 64-bit mode the immediate stays a dword and is sign-extended to 64 bits,
 * matching the Iz operand encoding.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Sign-extend the 32-bit immediate to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
509
510
/** Opcodes 0xf1, 0xd6.  Invalid opcodes; always raise \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
517
518
519
520/** @name ..... opcodes.
521 *
522 * @{
523 */
524
525/** @} */
526
527
528/** @name Two byte opcodes (first byte 0x0f).
529 *
530 * @{
531 */
532
/** Opcode 0x0f 0x00 /0.  SLDT - not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_Grp6_sldt, uint8_t, bRm);


/** Opcode 0x0f 0x00 /1.  STR - not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_Grp6_str, uint8_t, bRm);
539
540
/** Opcode 0x0f 0x00 /2.  LLDT - load local descriptor table register. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand: selector comes straight from a GPR. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        /* Memory operand: check CPL before touching memory. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
566
567
/** Opcode 0x0f 0x00 /3.  LTR - load task register. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand: selector comes straight from a GPR. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        /* Memory operand: check CPL before touching memory. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
593
594
/** Opcode 0x0f 0x00 /4.  VERR - not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_Grp6_verr, uint8_t, bRm);


/** Opcode 0x0f 0x00 /5.  VERW - not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_Grp6_verw, uint8_t, bRm);
601
602
603/** Opcode 0x0f 0x00. */
604FNIEMOP_DEF(iemOp_Grp6)
605{
606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
607 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
608 {
609 case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
610 case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str, bRm);
611 case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
612 case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr, bRm);
613 case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
614 case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
615 case 6: return IEMOP_RAISE_INVALID_OPCODE();
616 case 7: return IEMOP_RAISE_INVALID_OPCODE();
617 IEM_NOT_REACHED_DEFAULT_CASE_RET();
618 }
619
620}
621
622
/** Opcode 0x0f 0x01 /0.  SGDT - not implemented yet; asserts. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    NOREF(pIemCpu); NOREF(bRm);
    AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
629
630
/** Opcode 0x0f 0x01 /0, mod=3 rm=1 (0f 01 c1).  VMCALL - asserts, then \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0, mod=3 rm=2 (0f 01 c2).  VMLAUNCH - asserts, then \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0, mod=3 rm=3 (0f 01 c3).  VMRESUME - asserts, then \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0, mod=3 rm=4 (0f 01 c4).  VMXOFF - asserts, then \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
661
662
/** Opcode 0x0f 0x01 /1.  SIDT - not implemented yet; asserts. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    NOREF(pIemCpu); NOREF(bRm);
    AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}


/** Opcode 0x0f 0x01 /1, mod=3 rm=0 (0f 01 c8).  MONITOR - not implemented yet; asserts. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    NOREF(pIemCpu);
    AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}


/** Opcode 0x0f 0x01 /1, mod=3 rm=1 (0f 01 c9).  MWAIT - not implemented yet; asserts. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    NOREF(pIemCpu);
    AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
685
686
/** Opcode 0x0f 0x01 /2.  LGDT - load global descriptor table register. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /* In 64-bit mode the operand size is forced to 64-bit. */
    IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pIemCpu->enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
704
705
/** Opcode 0x0f 0x01 /2, mod=3 rm=0 (0f 01 d0).  XGETBV - asserts, then \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /2, mod=3 rm=1 (0f 01 d1).  XSETBV - asserts, then \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
720
721
/** Opcode 0x0f 0x01 /3.  LIDT - load interrupt descriptor table register. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /* In 64-bit mode the operand size is forced to 64-bit. */
    IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pIemCpu->enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
739
740
/* AMD SVM instruction group (0f 01 d8..df): all decoded as \#UD stubs for now. */

/** Opcode 0x0f 0x01 0xd8.  VMRUN. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9.  VMMCALL. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda.  VMLOAD. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb.  VMSAVE. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc.  STGI. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd.  CLGI. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde.  SKINIT. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf.  INVLPGA. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
764
/** Opcode 0x0f 0x01 /4.  SMSW - store machine status word (CR0 low bits). */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: width follows the effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
817
818
/** Opcode 0x0f 0x01 /6.  LMSW - load machine status word into CR0. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
845
846
/** Opcode 0x0f 0x01 /7.  INVLPG - invalidate TLB entry for the address. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
858
859
/** Opcode 0x0f 0x01 /7, mod=3 rm=0 (0f 01 f8).  SWAPGS - not implemented yet; asserts. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    NOREF(pIemCpu);
    AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}


/** Opcode 0x0f 0x01 /7, mod=3 rm=1 (0f 01 f9).  RDTSCP - not implemented yet; asserts. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pIemCpu);
    AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
874
875
/**
 * Opcode 0x0f 0x01.  Group 7 dispatcher.
 *
 * Dispatches on the ModR/M reg field; for several reg values the mod=3
 * encodings select distinct instructions via the rm field.
 */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            /* Memory form is SGDT; register forms are the VMX instructions. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            /* Memory form is SIDT; register forms are MONITOR/MWAIT. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            /* Memory form is LGDT; register forms are XGETBV/XSETBV. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            /* Memory form is LIDT; register forms are the AMD SVM group
               (all eight rm values are handled, so this always returns). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            /* Memory form is INVLPG; register forms are SWAPGS/RDTSCP. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
952
953
/** Opcode 0x0f 0x02.  LAR. */
FNIEMOP_STUB(iemOp_lar_Gv_Ew);
/** Opcode 0x0f 0x03.  LSL. */
FNIEMOP_STUB(iemOp_lsl_Gv_Ew);
/** Opcode 0x0f 0x05.  SYSCALL.  (Was misdocumented as 0x0f 0x04, which is undefined.) */
FNIEMOP_STUB(iemOp_syscall);
960
961
/** Opcode 0x0f 0x06.  CLTS - clear task-switched flag in CR0.
 *  (Was misdocumented as 0x0f 0x05, which is SYSCALL.) */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
969
970
971/** Opcode 0x0f 0x06. */
972FNIEMOP_STUB(iemOp_sysret);
973/** Opcode 0x0f 0x08. */
974FNIEMOP_STUB(iemOp_invd);
975/** Opcode 0x0f 0x09. */
976FNIEMOP_STUB(iemOp_wbinvd);
977/** Opcode 0x0f 0x0b. */
978FNIEMOP_STUB(iemOp_ud2);
979
980/** Opcode 0x0f 0x0d. */
981FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
982{
983 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
984 if (!IEM_IS_AMD_CPUID_FEATURES_ANY_PRESENT(X86_CPUID_EXT_FEATURE_EDX_LONG_MODE | X86_CPUID_AMD_FEATURE_EDX_3DNOW,
985 X86_CPUID_AMD_FEATURE_ECX_3DNOWPRF))
986 {
987 IEMOP_MNEMONIC("GrpP");
988 return IEMOP_RAISE_INVALID_OPCODE();
989 }
990
991 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
992 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
993 {
994 IEMOP_MNEMONIC("GrpP");
995 return IEMOP_RAISE_INVALID_OPCODE();
996 }
997
998 IEMOP_HLP_NO_LOCK_PREFIX();
999 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1000 {
1001 case 2: /* Aliased to /0 for the time being. */
1002 case 4: /* Aliased to /0 for the time being. */
1003 case 5: /* Aliased to /0 for the time being. */
1004 case 6: /* Aliased to /0 for the time being. */
1005 case 7: /* Aliased to /0 for the time being. */
1006 case 0: IEMOP_MNEMONIC("prefetch"); break;
1007 case 1: IEMOP_MNEMONIC("prefetchw "); break;
1008 case 3: IEMOP_MNEMONIC("prefetchw"); break;
1009 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1010 }
1011
1012 IEM_MC_BEGIN(0, 1);
1013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
1015 /* Currently a NOP. */
1016 IEM_MC_ADVANCE_RIP();
1017 IEM_MC_END();
1018 return VINF_SUCCESS;
1019}
1020
1021
/** Opcode 0x0f 0x0e.  FEMMS - not yet implemented (stub). */
FNIEMOP_STUB(iemOp_femms);


/* 3DNow! instruction stubs.  These are selected by the suffix byte of the
   0x0f 0x0f encoding (see iemOp_3Dnow below); none are implemented yet. */

/** Opcode 0x0f 0x0f 0x0c.  PI2FW. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d.  PI2FD. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c.  PF2FW. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d.  PF2FD. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a.  PFNACC. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e.  PFPNACC. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90.  PFCMPGE. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94.  PFMIN. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96.  PFRCP. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97.  PFRSQRT. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a.  PFSUB. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e.  PFADD. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0.  PFCMPGT. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4.  PFMAX. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6.  PFRCPIT1. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7.  PFRSQIT1. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa.  PFSUBR. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae.  PFACC. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0.  PFCMPEQ. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4.  PFMUL. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6.  PFRCPIT2. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7.  PMULHRW. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb.  PSWAPD. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf.  PAVGUSB. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1097
1098
/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /* Without the AMD 3DNow! CPUID bit the whole 0x0f 0x0f space is #UD. */
    if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_AMD_FEATURE_EDX_3DNOW))
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    /* NOTE(review): 3DNow! normally encodes the operation in the immediate
       byte that follows ModRM/SIB/displacement; here the byte straight after
       the 0x0f 0x0f escape is fetched and used to dispatch - confirm the
       ordering when the stubs above get implemented. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            /* Undefined suffix bytes decode to #UD. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1140
1141
/*
 * SSE/SSE2 move instructions 0x0f 0x10..0x17 - unimplemented stubs.  Each
 * long name lists all the instruction forms sharing that opcode byte.
 */
/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq);
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq);
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq);
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq);
1158
1159
1160/** Opcode 0x0f 0x18. */
1161FNIEMOP_DEF(iemOp_prefetch_Grp16)
1162{
1163 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1164 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1165 {
1166 IEMOP_HLP_NO_LOCK_PREFIX();
1167 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1168 {
1169 case 4: /* Aliased to /0 for the time being according to AMD. */
1170 case 5: /* Aliased to /0 for the time being according to AMD. */
1171 case 6: /* Aliased to /0 for the time being according to AMD. */
1172 case 7: /* Aliased to /0 for the time being according to AMD. */
1173 case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
1174 case 1: IEMOP_MNEMONIC("prefetchT0 m8"); break;
1175 case 2: IEMOP_MNEMONIC("prefetchT1 m8"); break;
1176 case 3: IEMOP_MNEMONIC("prefetchT2 m8"); break;
1177 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1178 }
1179
1180 IEM_MC_BEGIN(0, 1);
1181 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1182 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
1183 /* Currently a NOP. */
1184 IEM_MC_ADVANCE_RIP();
1185 IEM_MC_END();
1186 return VINF_SUCCESS;
1187 }
1188
1189 return IEMOP_RAISE_INVALID_OPCODE();
1190}
1191
1192
1193/** Opcode 0x0f 0x19..0x1f. */
1194FNIEMOP_DEF(iemOp_nop_Ev)
1195{
1196 IEMOP_HLP_NO_LOCK_PREFIX();
1197 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1198 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1199 {
1200 IEM_MC_BEGIN(0, 0);
1201 IEM_MC_ADVANCE_RIP();
1202 IEM_MC_END();
1203 }
1204 else
1205 {
1206 IEM_MC_BEGIN(0, 1);
1207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
1209 /* Currently a NOP. */
1210 IEM_MC_ADVANCE_RIP();
1211 IEM_MC_END();
1212 }
1213 return VINF_SUCCESS;
1214}
1215
1216
1217/** Opcode 0x0f 0x20. */
1218FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1219{
1220 /* mod is ignored, as is operand size overrides. */
1221 IEMOP_MNEMONIC("mov Rd,Cd");
1222 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
1223 pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
1224 else
1225 pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;
1226
1227 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1228 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
1229 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
1230 {
1231 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1232 if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_AMD_FEATURE_ECX_CR8L))
1233 return IEMOP_RAISE_INVALID_LOCK_PREFIX(); /* #UD takes precedence over #GP(), see test. */
1234 iCrReg |= 8;
1235 }
1236 switch (iCrReg)
1237 {
1238 case 0: case 2: case 3: case 4: case 8:
1239 break;
1240 default:
1241 return IEMOP_RAISE_INVALID_OPCODE();
1242 }
1243 IEMOP_HLP_DONE_DECODING();
1244
1245 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
1246}
1247
1248
1249/** Opcode 0x0f 0x21. */
1250FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1251{
1252 IEMOP_MNEMONIC("mov Rd,Dd");
1253 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1254 IEMOP_HLP_NO_LOCK_PREFIX();
1255 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
1256 return IEMOP_RAISE_INVALID_OPCODE();
1257 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1258 (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
1259 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1260}
1261
1262
1263/** Opcode 0x0f 0x22. */
1264FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1265{
1266 /* mod is ignored, as is operand size overrides. */
1267 IEMOP_MNEMONIC("mov Cd,Rd");
1268 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
1269 pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
1270 else
1271 pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;
1272
1273 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1274 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
1275 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
1276 {
1277 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1278 if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_AMD_FEATURE_ECX_CR8L))
1279 return IEMOP_RAISE_INVALID_LOCK_PREFIX(); /* #UD takes precedence over #GP(), see test. */
1280 iCrReg |= 8;
1281 }
1282 switch (iCrReg)
1283 {
1284 case 0: case 2: case 3: case 4: case 8:
1285 break;
1286 default:
1287 return IEMOP_RAISE_INVALID_OPCODE();
1288 }
1289 IEMOP_HLP_DONE_DECODING();
1290
1291 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
1292}
1293
1294
1295/** Opcode 0x0f 0x23. */
1296FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1297{
1298 IEMOP_MNEMONIC("mov Dd,Rd");
1299 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1301 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
1302 return IEMOP_RAISE_INVALID_OPCODE();
1303 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1304 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1305 (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
1306}
1307
1308
/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC("mov Rd,Td");
    /* The RM byte is not considered, see testcase. */
    /* Test register moves always raise #UD on the emulated CPU. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC("mov Td,Rd");
    /* The RM byte is not considered, see testcase. */
    /* Test register moves always raise #UD on the emulated CPU. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1325
1326
/*
 * Opcodes 0x0f 0x28..0x30 - unimplemented stubs (SSE moves/conversions,
 * comparisons, and WRMSR).
 */
/** Opcode 0x0f 0x28. */
FNIEMOP_STUB(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd);
/** Opcode 0x0f 0x29. */
FNIEMOP_STUB(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd);
/** Opcode 0x0f 0x2a. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey);
/** Opcode 0x0f 0x2b. */
FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd);
/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd);
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd);
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
/** Opcode 0x0f 0x30. */
FNIEMOP_STUB(iemOp_wrmsr);
1345
1346
/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* All the real work is deferred to the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
1354
1355
/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* All the real work is deferred to the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1363
1364
/** Opcode 0x0f 0x33. */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
/** Opcode 0x0f 0x3c (?).
 * NOTE(review): MOVNTI is normally encoded as 0x0f 0xc3 - confirm this
 * table slot. */
FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
1379
/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * Register form: the source register is fetched and stored only when the
 * condition holds.  Memory form: the source operand is always fetched from
 * memory (before the condition is evaluated); only the register store is
 * conditional.  For the 32-bit operand size the high half of the 64-bit
 * destination register is cleared even when the move is not performed
 * (IEM_MC_CLEAR_HIGH_GREG_U64 in the ELSE arm).
 *
 * @param   a_Cnd       The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
1480
1481
1482
/** Opcode 0x0f 0x40.  Move if overflow (OF=1). */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41.  Move if not overflow (OF=0). */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42.  Move if carry/below (CF=1). */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43.  Move if not carry / above-or-equal (CF=0). */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44.  Move if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45.  Move if not equal / not zero (ZF=0). */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46.  Move if below-or-equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47.  Move if above (CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48.  Move if sign (SF=1). */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49.  Move if not sign (SF=0). */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a.  Move if parity even (PF=1). */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b.  Move if parity odd (PF=0). */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c.  Move if less (SF != OF). */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d.  Move if not less (SF == OF). */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e.  Move if less-or-equal (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f.  Move if greater (ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X
1611
/*
 * Opcodes 0x0f 0x50..0x70 - unimplemented stubs (SSE/SSE2 arithmetic,
 * conversions, and MMX/SSE2 pack/unpack/compare instructions).
 */
/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd);
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
/** Opcode 0x0f 0x60. */
FNIEMOP_STUB(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq);
/** Opcode 0x0f 0x61. */
FNIEMOP_STUB(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq);
/** Opcode 0x0f 0x62. */
FNIEMOP_STUB(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq);
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
/** Opcode 0x0f 0x68. */
FNIEMOP_STUB(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq);
/** Opcode 0x0f 0x69. */
FNIEMOP_STUB(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq);
/** Opcode 0x0f 0x6a. */
FNIEMOP_STUB(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq);
/** Opcode 0x0f 0x6b. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
/** Opcode 0x0f 0x6c. */
FNIEMOP_STUB(iemOp_punpcklqdq_Vdq_Wdq);
/** Opcode 0x0f 0x6d. */
FNIEMOP_STUB(iemOp_punpckhqdq_Vdq_Wdq);
/** Opcode 0x0f 0x6e. */
FNIEMOP_STUB(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey);
/** Opcode 0x0f 0x6f. */
FNIEMOP_STUB(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq);
/** Opcode 0x0f 0x70. */
FNIEMOP_STUB(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib);
1678
/*
 * Group 12 workers - word shifts by immediate, register operand only.
 * The Nq forms are MMX, the Udq forms SSE2 (0x66 prefix); dispatched from
 * iemOp_Grp12 below.
 */
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
1696
1697
1698/** Opcode 0x0f 0x71. */
1699FNIEMOP_DEF(iemOp_Grp12)
1700{
1701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1702 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1703 return IEMOP_RAISE_INVALID_OPCODE();
1704 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1705 {
1706 case 0: case 1: case 3: case 5: case 7:
1707 return IEMOP_RAISE_INVALID_OPCODE();
1708 case 2:
1709 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1710 {
1711 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
1712 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
1713 default: return IEMOP_RAISE_INVALID_OPCODE();
1714 }
1715 case 4:
1716 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1717 {
1718 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
1719 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
1720 default: return IEMOP_RAISE_INVALID_OPCODE();
1721 }
1722 case 6:
1723 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1724 {
1725 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
1726 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
1727 default: return IEMOP_RAISE_INVALID_OPCODE();
1728 }
1729 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1730 }
1731}
1732
1733
/*
 * Group 13 workers - dword shifts by immediate, register operand only.
 * The Nq forms are MMX, the Udq forms SSE2 (0x66 prefix); dispatched from
 * iemOp_Grp13 below.
 */
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
1751
1752
1753/** Opcode 0x0f 0x72. */
1754FNIEMOP_DEF(iemOp_Grp13)
1755{
1756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1757 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1758 return IEMOP_RAISE_INVALID_OPCODE();
1759 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1760 {
1761 case 0: case 1: case 3: case 5: case 7:
1762 return IEMOP_RAISE_INVALID_OPCODE();
1763 case 2:
1764 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1765 {
1766 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
1767 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
1768 default: return IEMOP_RAISE_INVALID_OPCODE();
1769 }
1770 case 4:
1771 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1772 {
1773 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
1774 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
1775 default: return IEMOP_RAISE_INVALID_OPCODE();
1776 }
1777 case 6:
1778 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1779 {
1780 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
1781 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
1782 default: return IEMOP_RAISE_INVALID_OPCODE();
1783 }
1784 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1785 }
1786}
1787
1788
/*
 * Group 14 workers - qword/oword shifts by immediate, register operand
 * only.  The Nq forms are MMX, the Udq forms SSE2 (0x66 prefix); the byte
 * shifts (psrldq/pslldq) only exist in the SSE2 form.  Dispatched from
 * iemOp_Grp14 below.
 */
/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm);
1806
1807
1808/** Opcode 0x0f 0x73. */
1809FNIEMOP_DEF(iemOp_Grp14)
1810{
1811 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1812 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1813 return IEMOP_RAISE_INVALID_OPCODE();
1814 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1815 {
1816 case 0: case 1: case 4: case 5:
1817 return IEMOP_RAISE_INVALID_OPCODE();
1818 case 2:
1819 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1820 {
1821 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
1822 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
1823 default: return IEMOP_RAISE_INVALID_OPCODE();
1824 }
1825 case 3:
1826 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1827 {
1828 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
1829 default: return IEMOP_RAISE_INVALID_OPCODE();
1830 }
1831 case 6:
1832 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1833 {
1834 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
1835 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
1836 default: return IEMOP_RAISE_INVALID_OPCODE();
1837 }
1838 case 7:
1839 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1840 {
1841 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
1842 default: return IEMOP_RAISE_INVALID_OPCODE();
1843 }
1844 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1845 }
1846}
1847
1848
/*
 * Opcodes 0x0f 0x74..0x7f - unimplemented stubs (MMX/SSE compares, EMMS,
 * VMX-related encodings, and the remaining moves).
 */
/** Opcode 0x0f 0x74. */
FNIEMOP_STUB(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq);
/** Opcode 0x0f 0x75. */
FNIEMOP_STUB(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq);
/** Opcode 0x0f 0x76. */
FNIEMOP_STUB(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq);
/** Opcode 0x0f 0x77. */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78. */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79. */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d. */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
/** Opcode 0x0f 0x7e. */
FNIEMOP_STUB(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq);
/** Opcode 0x0f 0x7f. */
FNIEMOP_STUB(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq);
1869
1870
/** Opcode 0x0f 0x80.  Jump near if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x81.  Jump near if not overflow (OF=0); branch arms are
 *  inverted relative to jo. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x82.  Jump near if carry/below (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x83.  Jump near if not carry / above-or-equal (CF=0);
 *  branch arms are inverted relative to jc. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2009
2010
2011/** Opcode 0x0f 0x84. */
2012FNIEMOP_DEF(iemOp_je_Jv)
2013{
2014 IEMOP_MNEMONIC("je/jz Jv");
2015 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2016 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
2017 {
2018 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
2019 IEMOP_HLP_NO_LOCK_PREFIX();
2020
2021 IEM_MC_BEGIN(0, 0);
2022 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2023 IEM_MC_REL_JMP_S16(i16Imm);
2024 } IEM_MC_ELSE() {
2025 IEM_MC_ADVANCE_RIP();
2026 } IEM_MC_ENDIF();
2027 IEM_MC_END();
2028 }
2029 else
2030 {
2031 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
2032 IEMOP_HLP_NO_LOCK_PREFIX();
2033
2034 IEM_MC_BEGIN(0, 0);
2035 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2036 IEM_MC_REL_JMP_S32(i32Imm);
2037 } IEM_MC_ELSE() {
2038 IEM_MC_ADVANCE_RIP();
2039 } IEM_MC_ENDIF();
2040 IEM_MC_END();
2041 }
2042 return VINF_SUCCESS;
2043}
2044
2045
/**
 * Opcode 0x0f 0x85 - jne/jnz Jv.
 *
 * Near jump taken when ZF is clear; condition tested inverted, so the
 * jump sits in the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();       /* ZF=1: branch not taken. */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm); /* ZF=0: take the jump. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2079
2080
/**
 * Opcode 0x0f 0x86 - jbe/jna Jv.
 *
 * Near jump taken when CF=1 or ZF=1 (unsigned below-or-equal).
 */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm); /* CF or ZF set: take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2114
2115
/**
 * Opcode 0x0f 0x87 - jnbe/ja Jv.
 *
 * Near jump taken when CF=0 and ZF=0 (unsigned above); condition tested
 * inverted, so the jump sits in the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();       /* CF or ZF set: not taken. */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm); /* both clear: take the jump. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2149
2150
/**
 * Opcode 0x0f 0x88 - js Jv.
 *
 * Near jump taken when SF is set.
 */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm); /* SF=1: take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2184
2185
/**
 * Opcode 0x0f 0x89 - jns Jv.
 *
 * Near jump taken when SF is clear; condition tested inverted, so the
 * jump sits in the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();       /* SF=1: branch not taken. */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm); /* SF=0: take the jump. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2219
2220
/**
 * Opcode 0x0f 0x8a - jp/jpe Jv.
 *
 * Near jump taken when PF is set.
 */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm); /* PF=1: take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2254
2255
2256/** Opcode 0x0f 0x8b. */
2257FNIEMOP_DEF(iemOp_jnp_Jv)
2258{
2259 IEMOP_MNEMONIC("jo Jv");
2260 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2261 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
2262 {
2263 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
2264 IEMOP_HLP_NO_LOCK_PREFIX();
2265
2266 IEM_MC_BEGIN(0, 0);
2267 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2268 IEM_MC_ADVANCE_RIP();
2269 } IEM_MC_ELSE() {
2270 IEM_MC_REL_JMP_S16(i16Imm);
2271 } IEM_MC_ENDIF();
2272 IEM_MC_END();
2273 }
2274 else
2275 {
2276 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
2277 IEMOP_HLP_NO_LOCK_PREFIX();
2278
2279 IEM_MC_BEGIN(0, 0);
2280 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2281 IEM_MC_ADVANCE_RIP();
2282 } IEM_MC_ELSE() {
2283 IEM_MC_REL_JMP_S32(i32Imm);
2284 } IEM_MC_ENDIF();
2285 IEM_MC_END();
2286 }
2287 return VINF_SUCCESS;
2288}
2289
2290
/**
 * Opcode 0x0f 0x8c - jl/jnge Jv.
 *
 * Near jump taken when SF != OF (signed less-than).
 */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm); /* SF != OF: take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2324
2325
/**
 * Opcode 0x0f 0x8d - jnl/jge Jv.
 *
 * Near jump taken when SF == OF (signed greater-or-equal); condition
 * tested inverted, so the jump sits in the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();       /* SF != OF: branch not taken. */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm); /* SF == OF: take the jump. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2359
2360
/**
 * Opcode 0x0f 0x8e - jle/jng Jv.
 *
 * Near jump taken when ZF=1 or SF != OF (signed less-or-equal).
 */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm); /* ZF set or SF != OF: take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2394
2395
/**
 * Opcode 0x0f 0x8f - jnle/jg Jv.
 *
 * Near jump taken when ZF=0 and SF == OF (signed greater-than); condition
 * tested inverted, so the jump sits in the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();       /* ZF set or SF != OF: not taken. */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm); /* otherwise: take the jump. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2429
2430
/**
 * Opcode 0x0f 0x90 - seto Eb.
 *
 * Stores 1 into the byte register/memory operand when OF is set, else 0.
 */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2469
2470
/**
 * Opcode 0x0f 0x91 - setno Eb.
 *
 * Stores 1 when OF is clear, else 0 (inverse of seto: the 0/1 constants
 * are swapped relative to the OF test).
 */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2509
2510
/**
 * Opcode 0x0f 0x92 - setc/setb/setnae Eb.
 *
 * Stores 1 when CF is set, else 0.
 */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2549
2550
/**
 * Opcode 0x0f 0x93 - setnc/setnb/setae Eb.
 *
 * Stores 1 when CF is clear, else 0 (constants swapped vs. setc).
 */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2589
2590
/**
 * Opcode 0x0f 0x94 - sete/setz Eb.
 *
 * Stores 1 when ZF is set, else 0.
 */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2629
2630
/**
 * Opcode 0x0f 0x95 - setne/setnz Eb.
 *
 * Stores 1 when ZF is clear, else 0 (constants swapped vs. sete).
 */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2669
2670
/**
 * Opcode 0x0f 0x96 - setbe/setna Eb.
 *
 * Stores 1 when CF=1 or ZF=1 (unsigned below-or-equal), else 0.
 */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2709
2710
/**
 * Opcode 0x0f 0x97 - setnbe/seta Eb.
 *
 * Stores 1 when CF=0 and ZF=0 (unsigned above), else 0
 * (constants swapped vs. setbe).
 */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC("setnbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2749
2750
/**
 * Opcode 0x0f 0x98 - sets Eb.
 *
 * Stores 1 when SF is set, else 0.
 */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC("sets Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2789
2790
/**
 * Opcode 0x0f 0x99 - setns Eb.
 *
 * Stores 1 when SF is clear, else 0 (constants swapped vs. sets).
 */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC("setns Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2829
2830
2831/** Opcode 0x0f 0x9a. */
2832FNIEMOP_DEF(iemOp_setp_Eb)
2833{
2834 IEMOP_MNEMONIC("setnp Eb");
2835 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2836 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
2837
2838 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
2839 * any way. AMD says it's "unused", whatever that means. We're
2840 * ignoring for now. */
2841 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2842 {
2843 /* register target */
2844 IEM_MC_BEGIN(0, 0);
2845 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2846 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
2847 } IEM_MC_ELSE() {
2848 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
2849 } IEM_MC_ENDIF();
2850 IEM_MC_ADVANCE_RIP();
2851 IEM_MC_END();
2852 }
2853 else
2854 {
2855 /* memory target */
2856 IEM_MC_BEGIN(0, 1);
2857 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2858 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
2859 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2860 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
2861 } IEM_MC_ELSE() {
2862 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
2863 } IEM_MC_ENDIF();
2864 IEM_MC_ADVANCE_RIP();
2865 IEM_MC_END();
2866 }
2867 return VINF_SUCCESS;
2868}
2869
2870
/**
 * Opcode 0x0f 0x9b - setnp/setpo Eb.
 *
 * Stores 1 when PF is clear, else 0.
 */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC("setnp Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2909
2910
/**
 * Opcode 0x0f 0x9c - setl/setnge Eb.
 *
 * Stores 1 when SF != OF (signed less-than), else 0.
 */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC("setl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2949
2950
/**
 * Opcode 0x0f 0x9d - setnl/setge Eb.
 *
 * Stores 1 when SF == OF (signed greater-or-equal), else 0
 * (constants swapped vs. setl).
 */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC("setnl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2989
2990
/**
 * Opcode 0x0f 0x9e - setle/setng Eb.
 *
 * Stores 1 when ZF=1 or SF != OF (signed less-or-equal), else 0.
 */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC("setle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3029
3030
/**
 * Opcode 0x0f 0x9f - setnle/setg Eb.
 *
 * Stores 1 when ZF=0 and SF == OF (signed greater-than), else 0
 * (constants swapped vs. setle).
 */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC("setnle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3069
3070
/**
 * Common 'push segment-register' helper.
 *
 * Pushes the 16-bit segment selector @a iReg, zero-extended to the current
 * effective operand size (16/32/64-bit push).  ES/CS/SS/DS pushes are
 * invalid in 64-bit mode (IEMOP_HLP_NO_64BIT), while FS/GS remain legal
 * and take the default 64-bit operand size there.
 *
 * @param   iReg    The segment register index (X86_SREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* NOTE(review): no default case here; assumes enmEffOpSize is always one
       of the three handled modes — falls through returning success otherwise. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
3113
3114
/** Opcode 0x0f 0xa0 - push fs.  Delegates to the common segment-push
 *  helper (which repeats the lock-prefix check). */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
3122
3123
/** Opcode 0x0f 0xa1 - pop fs.  Deferred to a C implementation since
 *  loading a segment register involves descriptor-table access. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
3131
3132
/** Opcode 0x0f 0xa2 - cpuid.  Deferred to a C implementation. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
3140
3141
3142/**
3143 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
3144 * iemOp_bts_Ev_Gv.
3145 */
3146FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
3147{
3148 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3149 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3150
3151 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3152 {
3153 /* register destination. */
3154 IEMOP_HLP_NO_LOCK_PREFIX();
3155 switch (pIemCpu->enmEffOpSize)
3156 {
3157 case IEMMODE_16BIT:
3158 IEM_MC_BEGIN(3, 0);
3159 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3160 IEM_MC_ARG(uint16_t, u16Src, 1);
3161 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3162
3163 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3164 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
3165 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3166 IEM_MC_REF_EFLAGS(pEFlags);
3167 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3168
3169 IEM_MC_ADVANCE_RIP();
3170 IEM_MC_END();
3171 return VINF_SUCCESS;
3172
3173 case IEMMODE_32BIT:
3174 IEM_MC_BEGIN(3, 0);
3175 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3176 IEM_MC_ARG(uint32_t, u32Src, 1);
3177 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3178
3179 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3180 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
3181 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3182 IEM_MC_REF_EFLAGS(pEFlags);
3183 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3184
3185 IEM_MC_ADVANCE_RIP();
3186 IEM_MC_END();
3187 return VINF_SUCCESS;
3188
3189 case IEMMODE_64BIT:
3190 IEM_MC_BEGIN(3, 0);
3191 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3192 IEM_MC_ARG(uint64_t, u64Src, 1);
3193 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3194
3195 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3196 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
3197 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3198 IEM_MC_REF_EFLAGS(pEFlags);
3199 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3200
3201 IEM_MC_ADVANCE_RIP();
3202 IEM_MC_END();
3203 return VINF_SUCCESS;
3204
3205 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3206 }
3207 }
3208 else
3209 {
3210 /* memory destination. */
3211
3212 uint32_t fAccess;
3213 if (pImpl->pfnLockedU16)
3214 fAccess = IEM_ACCESS_DATA_RW;
3215 else /* BT */
3216 {
3217 IEMOP_HLP_NO_LOCK_PREFIX();
3218 fAccess = IEM_ACCESS_DATA_R;
3219 }
3220
3221 /** @todo test negative bit offsets! */
3222 switch (pIemCpu->enmEffOpSize)
3223 {
3224 case IEMMODE_16BIT:
3225 IEM_MC_BEGIN(3, 2);
3226 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3227 IEM_MC_ARG(uint16_t, u16Src, 1);
3228 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3230 IEM_MC_LOCAL(int16_t, i16AddrAdj);
3231
3232 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3233 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3234 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
3235 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
3236 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
3237 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
3238 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
3239 IEM_MC_FETCH_EFLAGS(EFlags);
3240
3241 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3242 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
3243 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3244 else
3245 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3246 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
3247
3248 IEM_MC_COMMIT_EFLAGS(EFlags);
3249 IEM_MC_ADVANCE_RIP();
3250 IEM_MC_END();
3251 return VINF_SUCCESS;
3252
3253 case IEMMODE_32BIT:
3254 IEM_MC_BEGIN(3, 2);
3255 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3256 IEM_MC_ARG(uint32_t, u32Src, 1);
3257 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3259 IEM_MC_LOCAL(int32_t, i32AddrAdj);
3260
3261 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3262 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3263 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
3264 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
3265 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
3266 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
3267 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
3268 IEM_MC_FETCH_EFLAGS(EFlags);
3269
3270 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3271 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
3272 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3273 else
3274 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3275 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
3276
3277 IEM_MC_COMMIT_EFLAGS(EFlags);
3278 IEM_MC_ADVANCE_RIP();
3279 IEM_MC_END();
3280 return VINF_SUCCESS;
3281
3282 case IEMMODE_64BIT:
3283 IEM_MC_BEGIN(3, 2);
3284 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3285 IEM_MC_ARG(uint64_t, u64Src, 1);
3286 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3288 IEM_MC_LOCAL(int64_t, i64AddrAdj);
3289
3290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3291 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3292 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
3293 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
3294 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
3295 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
3296 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
3297 IEM_MC_FETCH_EFLAGS(EFlags);
3298
3299 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3300 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
3301 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3302 else
3303 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3304 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
3305
3306 IEM_MC_COMMIT_EFLAGS(EFlags);
3307 IEM_MC_ADVANCE_RIP();
3308 IEM_MC_END();
3309 return VINF_SUCCESS;
3310
3311 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3312 }
3313 }
3314}
3315
3316
3317/** Opcode 0x0f 0xa3. */
3318FNIEMOP_DEF(iemOp_bt_Ev_Gv)
3319{
3320 IEMOP_MNEMONIC("bt Gv,Gv");
3321 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
3322}
3323
3324
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Double precision shift with an immediate shift count.  Note that in the
 * memory form the imm8 is fetched only after the effective address has been
 * calculated, since the ModR/M displacement bytes precede the immediate.
 *
 * @param   pImpl   The shld/shrd implementation table.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: the imm8 follows the ModR/M byte directly. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);    /* imm8 comes after the displacement */
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);    /* imm8 comes after the displacement */
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);    /* imm8 comes after the displacement */
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3468
3469
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Double precision shift with the shift count taken from CL.
 *
 * @param   pImpl   The shld/shrd implementation table.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG(uint8_t,         cShiftArg,              2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);      /* shift count from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG(uint8_t,         cShiftArg,              2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);      /* shift count from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG(uint8_t,         cShiftArg,              2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);      /* shift count from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3612
3613
3614
/** Opcode 0x0f 0xa4. (shld Ev,Gv,Ib) */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
3621
3622
/** Opcode 0x0f 0xa7. (shld Ev,Gv,CL)
 * @note The comment's opcode value looks off by two (shld CL is 0x0f 0xa5 in
 *       the Intel tables) -- TODO confirm against the opcode map this file
 *       feeds. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
3629
3630
/** Opcode 0x0f 0xa8. (push gs) */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
3638
3639
/** Opcode 0x0f 0xa9. (pop gs) */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Deferred to a C implementation since popping a segment register can fault. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
3647
3648
/** Opcode 0x0f 0xaa. (rsm - resume from system management mode; not implemented yet) */
FNIEMOP_STUB(iemOp_rsm);
3651
3652
/** Opcode 0x0f 0xab. (bts Ev,Gv) */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC("bts Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
3659
3660
/** Opcode 0x0f 0xac. (shrd Ev,Gv,Ib) */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
3667
3668
/** Opcode 0x0f 0xad. (shrd Ev,Gv,CL) */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
3675
3676
3677/** Opcode 0x0f 0xae mem/0. */
3678FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
3679{
3680 IEMOP_MNEMONIC("fxsave m512");
3681 IEMOP_HLP_NO_LOCK_PREFIX();
3682 if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_FXSR))
3683 return IEMOP_RAISE_INVALID_LOCK_PREFIX();
3684
3685 IEM_MC_BEGIN(3, 1);
3686 IEM_MC_ARG_CONST(uint8_t, iEffSeg,/*=*/pIemCpu->iEffSeg, 0);
3687 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3688 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
3689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
3690 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
3691 IEM_MC_END();
3692 return VINF_SUCCESS;
3693}
3694
3695
3696/** Opcode 0x0f 0xae mem/1. */
3697FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
3698{
3699 IEMOP_MNEMONIC("fxrstor m512");
3700 IEMOP_HLP_NO_LOCK_PREFIX();
3701 if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_FXSR))
3702 return IEMOP_RAISE_INVALID_LOCK_PREFIX();
3703
3704 IEM_MC_BEGIN(3, 1);
3705 IEM_MC_ARG_CONST(uint8_t, iEffSeg,/*=*/pIemCpu->iEffSeg, 0);
3706 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3707 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
3708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
3709 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
3710 IEM_MC_END();
3711 return VINF_SUCCESS;
3712}
3713
3714
/*
 * Remaining group 15 encodings: plain stubs are unimplemented (will assert),
 * UD stubs decode to #UD on purpose.
 */

/** Opcode 0x0f 0xae mem/2. (ldmxcsr) */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3. (stmxcsr) */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4. (xsave - decodes to #UD here) */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5. (xrstor - decodes to #UD here) */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6. (xsaveopt - decodes to #UD here) */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7. (clflush) */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);

/** Opcode 0x0f 0xae 11b/5. (lfence) */
FNIEMOP_STUB_1(iemOp_Grp15_lfence, uint8_t, bRm);

/** Opcode 0x0f 0xae 11b/6. (mfence) */
FNIEMOP_STUB_1(iemOp_Grp15_mfence, uint8_t, bRm);

/** Opcode 0x0f 0xae 11b/7. (sfence) */
FNIEMOP_STUB_1(iemOp_Grp15_sfence, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/0. (rdfsbase - decodes to #UD here) */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. (rdgsbase - decodes to #UD here) */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. (wrfsbase - decodes to #UD here) */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. (wrgsbase - decodes to #UD here) */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
3754
/** Opcode 0x0f 0xae. (group 15)
 *
 * Dispatches on the ModR/M mod and reg fields.  The memory forms
 * (fxsave/fxrstor/ldmxcsr/stmxcsr/xsave/xrstor/xsaveopt/clflush) ignore the
 * legacy prefixes here; the register forms additionally dispatch on the
 * repz/repnz/opsize/lock prefix combination (no prefix = fences, F3 = the
 * fs/gs base instructions, anything else = #UD).
 */
FNIEMOP_DEF(iemOp_Grp15)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory forms, selected by ModR/M.reg. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register forms: prefix combination selects the instruction family. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* no relevant prefixes: fence instructions. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            case IEM_OP_PRF_REPZ: /* F3 prefix: rd/wr fs/gs base. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
3813
3814
/** Opcode 0x0f 0xaf. (imul Gv,Ev - two operand form) */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
3822
3823
/** Opcode 0x0f 0xb0. (cmpxchg Eb,Gb - not implemented yet) */
FNIEMOP_STUB(iemOp_cmpxchg_Eb_Gb);
/** Opcode 0x0f 0xb1. (cmpxchg Ev,Gv - not implemented yet) */
FNIEMOP_STUB(iemOp_cmpxchg_Ev_Gv);
3828
3829
/**
 * Common worker for lss/lfs/lgs (and the like): loads a far pointer from
 * memory into a segment register + general register pair.
 *
 * The memory operand is offset followed by a 16-bit selector (m16:16,
 * m16:32 or m16:64 depending on operand size); a register source is invalid.
 * The actual segment/register loading is deferred to iemCImpl_load_SReg_Greg.
 *
 * @param   iSegReg     The destination segment register (X86_SREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* The source cannot be a register. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint16_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);           /* offset first, ... */
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2);     /* ... then the selector right after it. */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint32_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint64_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
            IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3890
3891
/** Opcode 0x0f 0xb2. (lss Gv,Mp) */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC("lss Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_SS);
}
3898
3899
/** Opcode 0x0f 0xb3. (btr Ev,Gv) */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC("btr Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
3906
3907
/** Opcode 0x0f 0xb4. (lfs Gv,Mp) */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC("lfs Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_FS);
}
3914
3915
/** Opcode 0x0f 0xb5. (lgs Gv,Mp) */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC("lgs Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_GS);
}
3922
3923
/** Opcode 0x0f 0xb6. (movzx Gv,Eb)
 *
 * Zero extends a byte register or memory operand into the destination
 * general register, for all three operand sizes.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4013
4014
/** Opcode 0x0f 0xb7. (movzx Gv,Ew)
 *
 * Zero extends a word register or memory operand into the destination
 * general register.  A 16-bit effective operand size behaves like 32-bit
 * here (the non-64-bit path covers both).
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4080
4081
/** Opcode 0x0f 0xb8 - popcnt Gv,Ev (F3 prefix) / jmpe.  Decoder stub, not implemented yet. */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
4084
4085
/** Opcode 0x0f 0xb9 - Group 10 (UD1).  Reserved encoding; always raises \#UD. */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
4092
4093
4094/** Opcode 0x0f 0xba. */
4095FNIEMOP_DEF(iemOp_Grp8)
4096{
4097 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4098 PCIEMOPBINSIZES pImpl;
4099 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
4100 {
4101 case 0: case 1: case 2: case 3:
4102 return IEMOP_RAISE_INVALID_OPCODE();
4103 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC("bt Ev,Ib"); break;
4104 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
4105 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
4106 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
4107 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4108 }
4109 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4110
4111 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4112 {
4113 /* register destination. */
4114 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
4115 IEMOP_HLP_NO_LOCK_PREFIX();
4116
4117 switch (pIemCpu->enmEffOpSize)
4118 {
4119 case IEMMODE_16BIT:
4120 IEM_MC_BEGIN(3, 0);
4121 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4122 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
4123 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4124
4125 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4126 IEM_MC_REF_EFLAGS(pEFlags);
4127 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4128
4129 IEM_MC_ADVANCE_RIP();
4130 IEM_MC_END();
4131 return VINF_SUCCESS;
4132
4133 case IEMMODE_32BIT:
4134 IEM_MC_BEGIN(3, 0);
4135 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4136 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
4137 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4138
4139 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4140 IEM_MC_REF_EFLAGS(pEFlags);
4141 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4142
4143 IEM_MC_ADVANCE_RIP();
4144 IEM_MC_END();
4145 return VINF_SUCCESS;
4146
4147 case IEMMODE_64BIT:
4148 IEM_MC_BEGIN(3, 0);
4149 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4150 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
4151 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4152
4153 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4154 IEM_MC_REF_EFLAGS(pEFlags);
4155 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4156
4157 IEM_MC_ADVANCE_RIP();
4158 IEM_MC_END();
4159 return VINF_SUCCESS;
4160
4161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4162 }
4163 }
4164 else
4165 {
4166 /* memory destination. */
4167
4168 uint32_t fAccess;
4169 if (pImpl->pfnLockedU16)
4170 fAccess = IEM_ACCESS_DATA_RW;
4171 else /* BT */
4172 {
4173 IEMOP_HLP_NO_LOCK_PREFIX();
4174 fAccess = IEM_ACCESS_DATA_R;
4175 }
4176
4177 /** @todo test negative bit offsets! */
4178 switch (pIemCpu->enmEffOpSize)
4179 {
4180 case IEMMODE_16BIT:
4181 IEM_MC_BEGIN(3, 1);
4182 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4183 IEM_MC_ARG(uint16_t, u16Src, 1);
4184 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4186
4187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
4188 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
4189 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
4190 IEM_MC_FETCH_EFLAGS(EFlags);
4191 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4192 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4193 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4194 else
4195 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4196 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4197
4198 IEM_MC_COMMIT_EFLAGS(EFlags);
4199 IEM_MC_ADVANCE_RIP();
4200 IEM_MC_END();
4201 return VINF_SUCCESS;
4202
4203 case IEMMODE_32BIT:
4204 IEM_MC_BEGIN(3, 1);
4205 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4206 IEM_MC_ARG(uint32_t, u32Src, 1);
4207 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4208 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4209
4210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
4211 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
4212 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
4213 IEM_MC_FETCH_EFLAGS(EFlags);
4214 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4215 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4216 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4217 else
4218 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4219 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4220
4221 IEM_MC_COMMIT_EFLAGS(EFlags);
4222 IEM_MC_ADVANCE_RIP();
4223 IEM_MC_END();
4224 return VINF_SUCCESS;
4225
4226 case IEMMODE_64BIT:
4227 IEM_MC_BEGIN(3, 1);
4228 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4229 IEM_MC_ARG(uint64_t, u64Src, 1);
4230 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4232
4233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
4234 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
4235 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
4236 IEM_MC_FETCH_EFLAGS(EFlags);
4237 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4238 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4239 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4240 else
4241 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4242 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4243
4244 IEM_MC_COMMIT_EFLAGS(EFlags);
4245 IEM_MC_ADVANCE_RIP();
4246 IEM_MC_END();
4247 return VINF_SUCCESS;
4248
4249 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4250 }
4251 }
4252
4253}
4254
4255
/** Opcode 0x0f 0xbb - btc Ev,Gv.  Dispatches to the common bit-test worker. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC("btc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
4262
4263
/** Opcode 0x0f 0xbc - bsf Gv,Ev (bit scan forward).
 *  Only ZF is defined; the other arithmetic flags are marked undefined for
 *  the verifier. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
4271
4272
/** Opcode 0x0f 0xbd - bsr Gv,Ev (bit scan reverse).
 *  Only ZF is defined; the other arithmetic flags are marked undefined for
 *  the verifier. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
4280
4281
/** Opcode 0x0f 0xbe - movsx Gv,Eb (sign extend byte to word/dword/qword). */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC("movsx Gv,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source - all three effective operand sizes are distinct. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4371
4372
/** Opcode 0x0f 0xbf - movsx Gv,Ew (sign extend word to dword/qword). */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC("movsx Gv,Ew");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source.  The 16-bit and 32-bit operand sizes share the
           32-bit store path; only REX.W selects a 64-bit sign extension. */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4438
4439
/** Opcode 0x0f 0xc0 - xadd Eb,Gb (exchange and add, byte form).
 *  Exchanges the destination with the source register and stores the sum in
 *  the destination; the LOCK prefix is honoured for the memory form. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Reg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  The source register is copied to a local
         * first so the worker can exchange through the reference, and the old
         * destination value is written back to the register afterwards.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS; /* note: redundant - falls through to the same return below. */
    }
    return VINF_SUCCESS;
}
4497
4498
/** Opcode 0x0f 0xc1 - xadd Ev,Gv (exchange and add, word/dword/qword form).
 *  Exchanges the destination with the source register and stores the sum in
 *  the destination; the LOCK prefix is honoured for the memory form. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC("xadd Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Reg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Reg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Reg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  The source register is copied to a local
         * first so the worker can exchange through the reference, and the old
         * destination value is written back to the register afterwards.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4648
/*
 * Stubs for 0x0f 0xc2..0xc6 and the Group 9 (0x0f 0xc7) leaves.
 * FNIEMOP_STUB* assert/fail when hit; FNIEMOP_UD_STUB* raise #UD.
 */

/** Opcode 0x0f 0xc2. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);

/** Opcode 0x0f 0xc3. */
FNIEMOP_STUB(iemOp_movnti_My_Gy);

/** Opcode 0x0f 0xc4. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);

/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_STUB_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm);

/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
4684
4685
4686/** Opcode 0x0f 0xc7. */
4687FNIEMOP_DEF(iemOp_Grp9)
4688{
4689 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
4690 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4691 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
4692 {
4693 case 0: case 2: case 3: case 4: case 5:
4694 return IEMOP_RAISE_INVALID_OPCODE();
4695 case 1:
4696 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
4697 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
4698 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
4699 return IEMOP_RAISE_INVALID_OPCODE();
4700 if (bRm & IEM_OP_PRF_SIZE_REX_W)
4701 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
4702 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
4703 case 6:
4704 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4705 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
4706 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
4707 {
4708 case 0:
4709 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
4710 case IEM_OP_PRF_SIZE_OP:
4711 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
4712 case IEM_OP_PRF_REPZ:
4713 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
4714 default:
4715 return IEMOP_RAISE_INVALID_OPCODE();
4716 }
4717 case 7:
4718 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
4719 {
4720 case 0:
4721 case IEM_OP_PRF_REPZ:
4722 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
4723 default:
4724 return IEMOP_RAISE_INVALID_OPCODE();
4725 }
4726 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4727 }
4728}
4729
4730
4731/**
4732 * Common 'bswap register' helper.
4733 */
4734FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
4735{
4736 IEMOP_HLP_NO_LOCK_PREFIX();
4737 switch (pIemCpu->enmEffOpSize)
4738 {
4739 case IEMMODE_16BIT:
4740 IEM_MC_BEGIN(1, 0);
4741 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4742 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
4743 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
4744 IEM_MC_ADVANCE_RIP();
4745 IEM_MC_END();
4746 return VINF_SUCCESS;
4747
4748 case IEMMODE_32BIT:
4749 IEM_MC_BEGIN(1, 0);
4750 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4751 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
4752 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4753 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
4754 IEM_MC_ADVANCE_RIP();
4755 IEM_MC_END();
4756 return VINF_SUCCESS;
4757
4758 case IEMMODE_64BIT:
4759 IEM_MC_BEGIN(1, 0);
4760 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4761 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
4762 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
4763 IEM_MC_ADVANCE_RIP();
4764 IEM_MC_END();
4765 return VINF_SUCCESS;
4766
4767 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4768 }
4769}
4770
4771
4772/** Opcode 0x0f 0xc8. */
4773FNIEMOP_DEF(iemOp_bswap_rAX_r8)
4774{
4775 IEMOP_MNEMONIC("bswap rAX/r8");
4776 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexReg);
4777}
4778
4779
4780/** Opcode 0x0f 0xc9. */
4781FNIEMOP_DEF(iemOp_bswap_rCX_r9)
4782{
4783 IEMOP_MNEMONIC("bswap rCX/r9");
4784 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexReg);
4785}
4786
4787
4788/** Opcode 0x0f 0xca. */
4789FNIEMOP_DEF(iemOp_bswap_rDX_r10)
4790{
4791 IEMOP_MNEMONIC("bswap rDX/r9");
4792 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexReg);
4793}
4794
4795
4796/** Opcode 0x0f 0xcb. */
4797FNIEMOP_DEF(iemOp_bswap_rBX_r11)
4798{
4799 IEMOP_MNEMONIC("bswap rBX/r9");
4800 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexReg);
4801}
4802
4803
4804/** Opcode 0x0f 0xcc. */
4805FNIEMOP_DEF(iemOp_bswap_rSP_r12)
4806{
4807 IEMOP_MNEMONIC("bswap rSP/r12");
4808 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexReg);
4809}
4810
4811
4812/** Opcode 0x0f 0xcd. */
4813FNIEMOP_DEF(iemOp_bswap_rBP_r13)
4814{
4815 IEMOP_MNEMONIC("bswap rBP/r13");
4816 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexReg);
4817}
4818
4819
4820/** Opcode 0x0f 0xce. */
4821FNIEMOP_DEF(iemOp_bswap_rSI_r14)
4822{
4823 IEMOP_MNEMONIC("bswap rSI/r14");
4824 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexReg);
4825}
4826
4827
4828/** Opcode 0x0f 0xcf. */
4829FNIEMOP_DEF(iemOp_bswap_rDI_r15)
4830{
4831 IEMOP_MNEMONIC("bswap rDI/r15");
4832 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexReg);
4833}
4834
4835
4836
/*
 * Stubs for the MMX/SSE arithmetic and move opcodes 0x0f 0xd0..0xfe.
 * FNIEMOP_STUB asserts/fails when the opcode is hit; none are implemented yet.
 */
/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
/** Opcode 0x0f 0xd7. */
FNIEMOP_STUB(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq);
/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7. */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
/** Opcode 0x0f 0xef. */
FNIEMOP_STUB(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq);
/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq);
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
4931
4932
4933const PFNIEMOP g_apfnTwoByteMap[256] =
4934{
4935 /* 0x00 */ iemOp_Grp6,
4936 /* 0x01 */ iemOp_Grp7,
4937 /* 0x02 */ iemOp_lar_Gv_Ew,
4938 /* 0x03 */ iemOp_lsl_Gv_Ew,
4939 /* 0x04 */ iemOp_Invalid,
4940 /* 0x05 */ iemOp_syscall,
4941 /* 0x06 */ iemOp_clts,
4942 /* 0x07 */ iemOp_sysret,
4943 /* 0x08 */ iemOp_invd,
4944 /* 0x09 */ iemOp_wbinvd,
4945 /* 0x0a */ iemOp_Invalid,
4946 /* 0x0b */ iemOp_ud2,
4947 /* 0x0c */ iemOp_Invalid,
4948 /* 0x0d */ iemOp_nop_Ev_GrpP,
4949 /* 0x0e */ iemOp_femms,
4950 /* 0x0f */ iemOp_3Dnow,
4951 /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
4952 /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
4953 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
4954 /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
4955 /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
4956 /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
4957 /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
4958 /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
4959 /* 0x18 */ iemOp_prefetch_Grp16,
4960 /* 0x19 */ iemOp_nop_Ev,
4961 /* 0x1a */ iemOp_nop_Ev,
4962 /* 0x1b */ iemOp_nop_Ev,
4963 /* 0x1c */ iemOp_nop_Ev,
4964 /* 0x1d */ iemOp_nop_Ev,
4965 /* 0x1e */ iemOp_nop_Ev,
4966 /* 0x1f */ iemOp_nop_Ev,
4967 /* 0x20 */ iemOp_mov_Rd_Cd,
4968 /* 0x21 */ iemOp_mov_Rd_Dd,
4969 /* 0x22 */ iemOp_mov_Cd_Rd,
4970 /* 0x23 */ iemOp_mov_Dd_Rd,
4971 /* 0x24 */ iemOp_mov_Rd_Td,
4972 /* 0x25 */ iemOp_Invalid,
4973 /* 0x26 */ iemOp_mov_Td_Rd,
4974 /* 0x27 */ iemOp_Invalid,
4975 /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
4976 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
4977 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
4978 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
4979 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
4980 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
4981 /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
4982 /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
4983 /* 0x30 */ iemOp_wrmsr,
4984 /* 0x31 */ iemOp_rdtsc,
4985 /* 0x32 */ iemOp_rdmsr,
4986 /* 0x33 */ iemOp_rdpmc,
4987 /* 0x34 */ iemOp_sysenter,
4988 /* 0x35 */ iemOp_sysexit,
4989 /* 0x36 */ iemOp_Invalid,
4990 /* 0x37 */ iemOp_getsec,
4991 /* 0x38 */ iemOp_3byte_Esc_A4,
4992 /* 0x39 */ iemOp_Invalid,
4993 /* 0x3a */ iemOp_3byte_Esc_A5,
4994 /* 0x3b */ iemOp_Invalid,
4995 /* 0x3c */ iemOp_movnti_Gv_Ev/*??*/,
4996 /* 0x3d */ iemOp_Invalid,
4997 /* 0x3e */ iemOp_Invalid,
4998 /* 0x3f */ iemOp_Invalid,
4999 /* 0x40 */ iemOp_cmovo_Gv_Ev,
5000 /* 0x41 */ iemOp_cmovno_Gv_Ev,
5001 /* 0x42 */ iemOp_cmovc_Gv_Ev,
5002 /* 0x43 */ iemOp_cmovnc_Gv_Ev,
5003 /* 0x44 */ iemOp_cmove_Gv_Ev,
5004 /* 0x45 */ iemOp_cmovne_Gv_Ev,
5005 /* 0x46 */ iemOp_cmovbe_Gv_Ev,
5006 /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
5007 /* 0x48 */ iemOp_cmovs_Gv_Ev,
5008 /* 0x49 */ iemOp_cmovns_Gv_Ev,
5009 /* 0x4a */ iemOp_cmovp_Gv_Ev,
5010 /* 0x4b */ iemOp_cmovnp_Gv_Ev,
5011 /* 0x4c */ iemOp_cmovl_Gv_Ev,
5012 /* 0x4d */ iemOp_cmovnl_Gv_Ev,
5013 /* 0x4e */ iemOp_cmovle_Gv_Ev,
5014 /* 0x4f */ iemOp_cmovnle_Gv_Ev,
5015 /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
5016 /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
5017 /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
5018 /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
5019 /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
5020 /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
5021 /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
5022 /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
5023 /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
5024 /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
5025 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
5026 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
5027 /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
5028 /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
5029 /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
5030 /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
5031 /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
5032 /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
5033 /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
5034 /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
5035 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
5036 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
5037 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
5038 /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
5039 /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
5040 /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
5041 /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
5042 /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
5043 /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
5044 /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
5045 /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
5046 /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
5047 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
5048 /* 0x71 */ iemOp_Grp12,
5049 /* 0x72 */ iemOp_Grp13,
5050 /* 0x73 */ iemOp_Grp14,
5051 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
5052 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
5053 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
5054 /* 0x77 */ iemOp_emms,
5055 /* 0x78 */ iemOp_vmread_AmdGrp17,
5056 /* 0x79 */ iemOp_vmwrite,
5057 /* 0x7a */ iemOp_Invalid,
5058 /* 0x7b */ iemOp_Invalid,
5059 /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
5060 /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
5061 /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
5062 /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
5063 /* 0x80 */ iemOp_jo_Jv,
5064 /* 0x81 */ iemOp_jno_Jv,
5065 /* 0x82 */ iemOp_jc_Jv,
5066 /* 0x83 */ iemOp_jnc_Jv,
5067 /* 0x84 */ iemOp_je_Jv,
5068 /* 0x85 */ iemOp_jne_Jv,
5069 /* 0x86 */ iemOp_jbe_Jv,
5070 /* 0x87 */ iemOp_jnbe_Jv,
5071 /* 0x88 */ iemOp_js_Jv,
5072 /* 0x89 */ iemOp_jns_Jv,
5073 /* 0x8a */ iemOp_jp_Jv,
5074 /* 0x8b */ iemOp_jnp_Jv,
5075 /* 0x8c */ iemOp_jl_Jv,
5076 /* 0x8d */ iemOp_jnl_Jv,
5077 /* 0x8e */ iemOp_jle_Jv,
5078 /* 0x8f */ iemOp_jnle_Jv,
5079 /* 0x90 */ iemOp_seto_Eb,
5080 /* 0x91 */ iemOp_setno_Eb,
5081 /* 0x92 */ iemOp_setc_Eb,
5082 /* 0x93 */ iemOp_setnc_Eb,
5083 /* 0x94 */ iemOp_sete_Eb,
5084 /* 0x95 */ iemOp_setne_Eb,
5085 /* 0x96 */ iemOp_setbe_Eb,
5086 /* 0x97 */ iemOp_setnbe_Eb,
5087 /* 0x98 */ iemOp_sets_Eb,
5088 /* 0x99 */ iemOp_setns_Eb,
5089 /* 0x9a */ iemOp_setp_Eb,
5090 /* 0x9b */ iemOp_setnp_Eb,
5091 /* 0x9c */ iemOp_setl_Eb,
5092 /* 0x9d */ iemOp_setnl_Eb,
5093 /* 0x9e */ iemOp_setle_Eb,
5094 /* 0x9f */ iemOp_setnle_Eb,
5095 /* 0xa0 */ iemOp_push_fs,
5096 /* 0xa1 */ iemOp_pop_fs,
5097 /* 0xa2 */ iemOp_cpuid,
5098 /* 0xa3 */ iemOp_bt_Ev_Gv,
5099 /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
5100 /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
5101 /* 0xa6 */ iemOp_Invalid,
5102 /* 0xa7 */ iemOp_Invalid,
5103 /* 0xa8 */ iemOp_push_gs,
5104 /* 0xa9 */ iemOp_pop_gs,
5105 /* 0xaa */ iemOp_rsm,
5106 /* 0xab */ iemOp_bts_Ev_Gv,
5107 /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
5108 /* 0xad */ iemOp_shrd_Ev_Gv_CL,
5109 /* 0xae */ iemOp_Grp15,
5110 /* 0xaf */ iemOp_imul_Gv_Ev,
5111 /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
5112 /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
5113 /* 0xb2 */ iemOp_lss_Gv_Mp,
5114 /* 0xb3 */ iemOp_btr_Ev_Gv,
5115 /* 0xb4 */ iemOp_lfs_Gv_Mp,
5116 /* 0xb5 */ iemOp_lgs_Gv_Mp,
5117 /* 0xb6 */ iemOp_movzx_Gv_Eb,
5118 /* 0xb7 */ iemOp_movzx_Gv_Ew,
5119 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
5120 /* 0xb9 */ iemOp_Grp10,
5121 /* 0xba */ iemOp_Grp8,
    /* 0xbb */ iemOp_btc_Ev_Gv,
5123 /* 0xbc */ iemOp_bsf_Gv_Ev,
5124 /* 0xbd */ iemOp_bsr_Gv_Ev,
5125 /* 0xbe */ iemOp_movsx_Gv_Eb,
5126 /* 0xbf */ iemOp_movsx_Gv_Ew,
5127 /* 0xc0 */ iemOp_xadd_Eb_Gb,
5128 /* 0xc1 */ iemOp_xadd_Ev_Gv,
5129 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
5130 /* 0xc3 */ iemOp_movnti_My_Gy,
5131 /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
5132 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
5133 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
5134 /* 0xc7 */ iemOp_Grp9,
5135 /* 0xc8 */ iemOp_bswap_rAX_r8,
5136 /* 0xc9 */ iemOp_bswap_rCX_r9,
5137 /* 0xca */ iemOp_bswap_rDX_r10,
5138 /* 0xcb */ iemOp_bswap_rBX_r11,
5139 /* 0xcc */ iemOp_bswap_rSP_r12,
5140 /* 0xcd */ iemOp_bswap_rBP_r13,
5141 /* 0xce */ iemOp_bswap_rSI_r14,
5142 /* 0xcf */ iemOp_bswap_rDI_r15,
5143 /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
5144 /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
5145 /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
5146 /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
5147 /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
5148 /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
5149 /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
5150 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
5151 /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
5152 /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
5153 /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
5154 /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
5155 /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
5156 /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
5157 /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
5158 /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
5159 /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
5160 /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
5161 /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
5162 /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
5163 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
5164 /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
5165 /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
5166 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
5167 /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
5168 /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
5169 /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
5170 /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
5171 /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
5172 /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
5173 /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
5174 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
5175 /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
5176 /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
5177 /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
5178 /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
5179 /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
5180 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
5181 /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
5182 /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
5183 /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
5184 /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
5185 /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
5186 /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
5187 /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
5188 /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
5189 /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
5190 /* 0xff */ iemOp_Invalid
5191};
5192
5193/** @} */
5194
5195
5196/** @name One byte opcodes.
5197 *
5198 * @{
5199 */
5200
/** Opcode 0x00 - ADD Eb,Gb (byte r/m destination form). */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    /* Decoding and dispatch are shared with the other r/m,r8 ALU instructions. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01 - ADD Ev,Gv (word/dword/qword r/m destination form). */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02 - ADD Gb,Eb (byte register destination form). */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03 - ADD Gv,Ev (word/dword/qword register destination form). */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04 - ADD AL,Ib (byte immediate to accumulator). */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05 - ADD rAX,Iz (immediate to accumulator, effective operand size). */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
5247
5248
/** Opcode 0x06 - PUSH ES. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07 - POP ES; not encodable in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();   /* POP ES raises #UD in long mode. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
5265
5266
/** Opcode 0x08 - OR Eb,Gb. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or Eb,Gb");
    /* AF is architecturally undefined after OR; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
5274
5275
5276/** Opcode 0x09. */
5277FNIEMOP_DEF(iemOp_or_Ev_Gv)
5278{
5279 IEMOP_MNEMONIC("or Ev,Gv ");
5280 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5281 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
5282}
5283
5284
/** Opcode 0x0a - OR Gb,Eb. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b - OR Gv,Ev. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c - OR AL,Ib. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d - OR rAX,Iz. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}


/** Opcode 0x0e - PUSH CS. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
5327
5328
/** Opcode 0x0f - two-byte opcode escape; fetches the second opcode byte and
 *  dispatches through the two-byte opcode table. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
5335
/** Opcode 0x10 - ADC Eb,Gb (add with carry, byte r/m destination). */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11 - ADC Ev,Gv. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12 - ADC Gb,Eb. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13 - ADC Gv,Ev. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14 - ADC AL,Ib. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15 - ADC rAX,Iz. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}


/** Opcode 0x16 - PUSH SS. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
5390
5391
/** Opcode 0x17 - POP SS; not encodable in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();   /* POP SS raises #UD in long mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
5400
5401
/** Opcode 0x18 - SBB Eb,Gb (subtract with borrow, byte r/m destination). */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19 - SBB Ev,Gv. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a - SBB Gb,Eb. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b - SBB Gv,Ev. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c - SBB AL,Ib. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d - SBB rAX,Iz. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}


/** Opcode 0x1e - PUSH DS. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f - POP DS; not encodable in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();   /* POP DS raises #UD in long mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
5466
5467
/** Opcode 0x20 - AND Eb,Gb. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    /* AF is architecturally undefined after AND; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21 - AND Ev,Gv. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22 - AND Gb,Eb. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23 - AND Gv,Ev. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24 - AND AL,Ib. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25 - AND rAX,Iz. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}


/** Opcode 0x26 - ES segment override prefix; records the override and
 *  restarts decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x27 - DAA; not implemented yet (stub raises the default error). */
FNIEMOP_STUB(iemOp_daa);
5535
5536
/** Opcode 0x28 - SUB Eb,Gb. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29 - SUB Ev,Gv. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a - SUB Gb,Eb. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b - SUB Gv,Ev. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c - SUB AL,Ib. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d - SUB rAX,Iz. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}


/** Opcode 0x2e - CS segment override prefix; records the override and
 *  restarts decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x2f - DAS; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_das);
5598
5599
/** Opcode 0x30 - XOR Eb,Gb. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    /* AF is architecturally undefined after XOR; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31 - XOR Ev,Gv. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32 - XOR Gb,Eb. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33 - XOR Gv,Ev. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34 - XOR AL,Ib. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35 - XOR rAX,Iz. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}


/** Opcode 0x36 - SS segment override prefix; records the override and
 *  restarts decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x37 - AAA; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_aaa);
5667
5668
/** Opcode 0x38 - CMP Eb,Gb (compare only; no destination write). */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39 - CMP Ev,Gv. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
5685
5686
5687/** Opcode 0x3a. */
5688FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
5689{
5690 IEMOP_MNEMONIC("cmp Gb,Eb");
5691 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
5692}
5693
5694
5695/** Opcode 0x3b. */
5696FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
5697{
5698 IEMOP_MNEMONIC("cmp Gv,Ev");
5699 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
5700}
5701
5702
/** Opcode 0x3c - CMP AL,Ib. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    /* NOTE(review): unlike the 0x38/0x39 forms there is no explicit lock-prefix
       check here - confirm the AL,Ib helper rejects LOCK itself. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d - CMP rAX,Iz. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}


/** Opcode 0x3e - DS segment override prefix; records the override and
 *  restarts decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x3f - AAS; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_aas);
5732
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Operates on the general purpose register selected by @a iReg using the
 * current effective operand size, calling the matching size-specific worker
 * from @a pImpl and updating EFLAGS through it.
 *
 * @param   pImpl   Table with the 16/32/64-bit worker functions.
 * @param   iReg    The general purpose register index (X86_GREG_XXX).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reached for the three IEMMODE values handled above; the switch has
       no default case, so this keeps the compiler happy. */
    return VINF_SUCCESS;
}
5776
5777
/** Opcode 0x40 - REX prefix in 64-bit mode, INC eAX elsewhere. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        /* Restart decoding with the byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41 - REX.B prefix in 64-bit mode, INC eCX elsewhere. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB = 1 << 3;    /* OR'ed into register indexes to reach r8-r15 */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42 - REX.X prefix in 64-bit mode, INC eDX elsewhere. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43 - REX.XB prefix in 64-bit mode, INC eBX elsewhere. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44 - REX.R prefix in 64-bit mode, INC eSP elsewhere. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45 - REX.RB prefix in 64-bit mode, INC eBP elsewhere. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46 - REX.RX prefix in 64-bit mode, INC eSI elsewhere. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47 - REX.RXB prefix in 64-bit mode, INC eDI elsewhere. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
5941
5942
/** Opcode 0x48 - REX.W prefix in 64-bit mode, DEC eAX elsewhere. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49 - REX.WB prefix in 64-bit mode, DEC eCX elsewhere. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;    /* OR'ed into register indexes to reach r8-r15 */
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a - REX.WX prefix in 64-bit mode, DEC eDX elsewhere. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b - REX.WXB prefix in 64-bit mode, DEC eBX elsewhere. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}


/** Opcode 0x4c - REX.WR prefix in 64-bit mode, DEC eSP elsewhere. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d - REX.WRB prefix in 64-bit mode, DEC eBP elsewhere. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e - REX.WRX prefix in 64-bit mode, DEC eSI elsewhere. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f - REX.WRXB prefix in 64-bit mode, DEC eDI elsewhere. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
6113
6114
6115/**
6116 * Common 'push register' helper.
 *
 * Worker for opcodes 0x50..0x57.  @a iReg is the X86_GREG_xXX index of the
 * register to push; in 64-bit mode it is extended with the REX.B bit and the
 * default operand size is forced to 64-bit (a 0x66 prefix selects 16-bit,
 * 32-bit pushes are not encodable in long mode).
6117 */
6118FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
6119{
6120    IEMOP_HLP_NO_LOCK_PREFIX();
6121    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
6122    {
6123        iReg |= pIemCpu->uRexB;
6124        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
6125        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
6126    }
6127
     /* Fetch the register value into a local and push it at the effective
        operand size; RIP is only advanced after a successful push. */
6128    switch (pIemCpu->enmEffOpSize)
6129    {
6130        case IEMMODE_16BIT:
6131            IEM_MC_BEGIN(0, 1);
6132            IEM_MC_LOCAL(uint16_t, u16Value);
6133            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
6134            IEM_MC_PUSH_U16(u16Value);
6135            IEM_MC_ADVANCE_RIP();
6136            IEM_MC_END();
6137            break;
6138
6139        case IEMMODE_32BIT:
6140            IEM_MC_BEGIN(0, 1);
6141            IEM_MC_LOCAL(uint32_t, u32Value);
6142            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
6143            IEM_MC_PUSH_U32(u32Value);
6144            IEM_MC_ADVANCE_RIP();
6145            IEM_MC_END();
6146            break;
6147
6148        case IEMMODE_64BIT:
6149            IEM_MC_BEGIN(0, 1);
6150            IEM_MC_LOCAL(uint64_t, u64Value);
6151            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
6152            IEM_MC_PUSH_U64(u64Value);
6153            IEM_MC_ADVANCE_RIP();
6154            IEM_MC_END();
6155            break;
6156    }
6157
6158    return VINF_SUCCESS;
6159}
6160
6161
6162/** Opcode 0x50. */
/* Opcodes 0x50..0x57 are thin wrappers: each passes its fixed register index
   to iemOpCommonPushGReg, which applies the REX.B extension and operand-size
   rules in 64-bit mode. */
6163FNIEMOP_DEF(iemOp_push_eAX)
6164{
6165    IEMOP_MNEMONIC("push rAX");
6166    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
6167}
6168
6169
6170/** Opcode 0x51. */
6171FNIEMOP_DEF(iemOp_push_eCX)
6172{
6173    IEMOP_MNEMONIC("push rCX");
6174    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
6175}
6176
6177
6178/** Opcode 0x52. */
6179FNIEMOP_DEF(iemOp_push_eDX)
6180{
6181    IEMOP_MNEMONIC("push rDX");
6182    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
6183}
6184
6185
6186/** Opcode 0x53. */
6187FNIEMOP_DEF(iemOp_push_eBX)
6188{
6189    IEMOP_MNEMONIC("push rBX");
6190    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
6191}
6192
6193
6194/** Opcode 0x54. */
6195FNIEMOP_DEF(iemOp_push_eSP)
6196{
6197    IEMOP_MNEMONIC("push rSP");
6198    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
6199}
6200
6201
6202/** Opcode 0x55. */
6203FNIEMOP_DEF(iemOp_push_eBP)
6204{
6205    IEMOP_MNEMONIC("push rBP");
6206    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
6207}
6208
6209
6210/** Opcode 0x56. */
6211FNIEMOP_DEF(iemOp_push_eSI)
6212{
6213    IEMOP_MNEMONIC("push rSI");
6214    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
6215}
6216
6217
6218/** Opcode 0x57. */
6219FNIEMOP_DEF(iemOp_push_eDI)
6220{
6221    IEMOP_MNEMONIC("push rDI");
6222    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
6223}
6224
6225
6226/**
6227 * Common 'pop register' helper.
 *
 * Worker for opcodes 0x58..0x5f.  @a iReg is the X86_GREG_xXX index of the
 * destination register; in 64-bit mode it is extended with the REX.B bit and
 * the default operand size becomes 64-bit (0x66 selects 16-bit).
 *
 * NOTE(review): the locals below are declared as IEM_MC_LOCAL(uintXX_t,
 * *puXXDst) — the '*' is part of the name argument, so the macro expands to
 * a pointer local.  Presumably intentional so POP can write through a GREG
 * reference; confirm against the IEM_MC_LOCAL definition.
6228 */
6229FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
6230{
6231    IEMOP_HLP_NO_LOCK_PREFIX();
6232    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
6233    {
6234        iReg |= pIemCpu->uRexB;
6235        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
6236        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
6237    }
6238
6239/** @todo How does this code handle iReg==X86_GREG_xSP. How does a real CPU
6240 * handle it, for that matter (Intel pseudo code hints that the popped
6241 * value is incremented by the stack item size.) Test it, both encodings
6242 * and all three register sizes. */
6243    switch (pIemCpu->enmEffOpSize)
6244    {
6245        case IEMMODE_16BIT:
6246            IEM_MC_BEGIN(0, 1);
6247            IEM_MC_LOCAL(uint16_t, *pu16Dst);
6248            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
6249            IEM_MC_POP_U16(pu16Dst);
6250            IEM_MC_ADVANCE_RIP();
6251            IEM_MC_END();
6252            break;
6253
6254        case IEMMODE_32BIT:
6255            IEM_MC_BEGIN(0, 1);
6256            IEM_MC_LOCAL(uint32_t, *pu32Dst);
6257            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
6258            IEM_MC_POP_U32(pu32Dst);
6259            IEM_MC_ADVANCE_RIP();
6260            IEM_MC_END();
6261            break;
6262
6263        case IEMMODE_64BIT:
6264            IEM_MC_BEGIN(0, 1);
6265            IEM_MC_LOCAL(uint64_t, *pu64Dst);
6266            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
6267            IEM_MC_POP_U64(pu64Dst);
6268            IEM_MC_ADVANCE_RIP();
6269            IEM_MC_END();
6270            break;
6271    }
6272
6273    return VINF_SUCCESS;
6274}
6275
6276
6277/** Opcode 0x58. */
/* Opcodes 0x58..0x5f are thin wrappers: each passes its fixed register index
   to iemOpCommonPopGReg, which applies the REX.B extension and operand-size
   rules in 64-bit mode. */
6278FNIEMOP_DEF(iemOp_pop_eAX)
6279{
6280    IEMOP_MNEMONIC("pop rAX");
6281    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
6282}
6283
6284
6285/** Opcode 0x59. */
6286FNIEMOP_DEF(iemOp_pop_eCX)
6287{
6288    IEMOP_MNEMONIC("pop rCX");
6289    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
6290}
6291
6292
6293/** Opcode 0x5a. */
6294FNIEMOP_DEF(iemOp_pop_eDX)
6295{
6296    IEMOP_MNEMONIC("pop rDX");
6297    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
6298}
6299
6300
6301/** Opcode 0x5b. */
6302FNIEMOP_DEF(iemOp_pop_eBX)
6303{
6304    IEMOP_MNEMONIC("pop rBX");
6305    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
6306}
6307
6308
6309/** Opcode 0x5c. */
6310FNIEMOP_DEF(iemOp_pop_eSP)
6311{
6312    IEMOP_MNEMONIC("pop rSP");
6313    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
6314}
6315
6316
6317/** Opcode 0x5d. */
6318FNIEMOP_DEF(iemOp_pop_eBP)
6319{
6320    IEMOP_MNEMONIC("pop rBP");
6321    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
6322}
6323
6324
6325/** Opcode 0x5e. */
6326FNIEMOP_DEF(iemOp_pop_eSI)
6327{
6328    IEMOP_MNEMONIC("pop rSI");
6329    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
6330}
6331
6332
6333/** Opcode 0x5f. */
6334FNIEMOP_DEF(iemOp_pop_eDI)
6335{
6336    IEMOP_MNEMONIC("pop rDI");
6337    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
6338}
6339
6340
6341/** Opcode 0x60. */
/* PUSHA/PUSHAD - invalid in 64-bit mode (IEMOP_HLP_NO_64BIT raises #UD);
   defers to the C implementation for the 16/32-bit operand size. */
6342FNIEMOP_DEF(iemOp_pusha)
6343{
6344    IEMOP_MNEMONIC("pusha");
6345    IEMOP_HLP_NO_64BIT();
6346    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
6347        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
6348    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
6349    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
6350}
6351
6352
6353/** Opcode 0x61. */
/* POPA/POPAD - invalid in 64-bit mode (IEMOP_HLP_NO_64BIT raises #UD);
   defers to the C implementation for the 16/32-bit operand size. */
6354FNIEMOP_DEF(iemOp_popa)
6355{
6356    IEMOP_MNEMONIC("popa");
6357    IEMOP_HLP_NO_64BIT();
6358    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
6359        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
6360    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
6361    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
6362}
6363
6364
6365/** Opcode 0x62. */
/* BOUND - not implemented yet; FNIEMOP_STUB presumably generates a
   placeholder that reports the instruction as unimplemented. */
6366FNIEMOP_STUB(iemOp_bound_Gv_Ma);
6367/** Opcode 0x63. */
/* ARPL (legacy modes) / MOVSXD (64-bit mode) - not implemented yet. */
6368FNIEMOP_STUB(iemOp_arpl_Ew_Gw);
6369
6370
6371/** Opcode 0x64. */
/* FS segment-override prefix: records the prefix flag, makes FS the
   effective segment, then restarts decoding with the next opcode byte. */
6372FNIEMOP_DEF(iemOp_seg_FS)
6373{
6374    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
6375    pIemCpu->iEffSeg = X86_SREG_FS;
6376
6377    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
6378    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
6379}
6380
6381
6381/** Opcode 0x65. */
/* GS segment-override prefix: records the prefix flag, makes GS the
   effective segment, then restarts decoding with the next opcode byte. */
6382FNIEMOP_DEF(iemOp_seg_GS)
6383{
6384    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
6385    pIemCpu->iEffSeg = X86_SREG_GS;
6386
6387    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
6388    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
6389}
6391
6392
6393/** Opcode 0x66. */
/* Operand-size override prefix: sets the flag, recalculates the effective
   operand size, then restarts decoding with the next opcode byte. */
6394FNIEMOP_DEF(iemOp_op_size)
6395{
6396    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
6397    iemRecalEffOpSize(pIemCpu);
6398
6399    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
6400    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
6401}
6402
6403
6404/** Opcode 0x67. */
/*
 * Address-size override prefix: toggles the effective address mode relative
 * to the default (16<->32 in legacy/compat modes, 64->32 in long mode), then
 * restarts decoding with the next opcode byte.
 */
6405FNIEMOP_DEF(iemOp_addr_size)
6406{
6407    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
6408    switch (pIemCpu->enmDefAddrMode)
6409    {
6410        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
6411        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
6412        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
6413        default: AssertFailed();
6414    }
6415
6416    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
6417    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
6418}
6419
6420
6421/** Opcode 0x68. */
/*
 * PUSH Iz - push an immediate of operand size.  In 64-bit mode the default
 * operand size is 64-bit and the 32-bit immediate is sign-extended to 64
 * bits before being pushed.
 */
6422FNIEMOP_DEF(iemOp_push_Iz)
6423{
6424    IEMOP_MNEMONIC("push Iz");
6425    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6426    switch (pIemCpu->enmEffOpSize)
6427    {
6428        case IEMMODE_16BIT:
6429        {
6430            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6431            IEMOP_HLP_NO_LOCK_PREFIX();
6432            IEM_MC_BEGIN(0,0);
6433            IEM_MC_PUSH_U16(u16Imm);
6434            IEM_MC_ADVANCE_RIP();
6435            IEM_MC_END();
6436            return VINF_SUCCESS;
6437        }
6438
6439        case IEMMODE_32BIT:
6440        {
6441            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6442            IEMOP_HLP_NO_LOCK_PREFIX();
6443            IEM_MC_BEGIN(0,0);
6444            IEM_MC_PUSH_U32(u32Imm);
6445            IEM_MC_ADVANCE_RIP();
6446            IEM_MC_END();
6447            return VINF_SUCCESS;
6448        }
6449
6450        case IEMMODE_64BIT:
6451        {
                 /* Immediate is 32 bits on the wire, sign-extended to 64. */
6452            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6453            IEMOP_HLP_NO_LOCK_PREFIX();
6454            IEM_MC_BEGIN(0,0);
6455            IEM_MC_PUSH_U64(u64Imm);
6456            IEM_MC_ADVANCE_RIP();
6457            IEM_MC_END();
6458            return VINF_SUCCESS;
6459        }
6460
6461        IEM_NOT_REACHED_DEFAULT_CASE_RET();
6462    }
6463}
6464
6465
6466/** Opcode 0x69. */
/*
 * IMUL Gv,Ev,Iz - three-operand signed multiply with a full-size immediate.
 * For each operand size: fetch the immediate, evaluate Ev (register or
 * memory), multiply via the iemAImpl_imul_two_uXX assembly worker, and store
 * the truncated product into the Gv register.  SF/ZF/AF/PF are declared
 * undefined for the verifier; the 64-bit immediate is a sign-extended imm32.
 */
6467FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
6468{
6469    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
6470    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6471    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6472
6473    switch (pIemCpu->enmEffOpSize)
6474    {
6475        case IEMMODE_16BIT:
6476        {
6477            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6478            IEMOP_HLP_NO_LOCK_PREFIX();
6479            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6480            {
6481                /* register operand */
6482                IEM_MC_BEGIN(3, 1);
6483                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6484                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
6485                IEM_MC_ARG(uint32_t *, pEFlags, 2);
6486                IEM_MC_LOCAL(uint16_t, u16Tmp);
6487
6488                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6489                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
6490                IEM_MC_REF_EFLAGS(pEFlags);
6491                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
6492                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);
6493
6494                IEM_MC_ADVANCE_RIP();
6495                IEM_MC_END();
6496            }
6497            else
6498            {
6499                /* memory operand */
6500                IEM_MC_BEGIN(3, 2);
6501                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6502                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
6503                IEM_MC_ARG(uint32_t *, pEFlags, 2);
6504                IEM_MC_LOCAL(uint16_t, u16Tmp);
6505                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6506
6507                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
6508                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
6509                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
6510                IEM_MC_REF_EFLAGS(pEFlags);
6511                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
6512                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);
6513
6514                IEM_MC_ADVANCE_RIP();
6515                IEM_MC_END();
6516            }
6517            return VINF_SUCCESS;
6518        }
6519
6520        case IEMMODE_32BIT:
6521        {
6522            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6523            IEMOP_HLP_NO_LOCK_PREFIX();
6524            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6525            {
6526                /* register operand */
6527                IEM_MC_BEGIN(3, 1);
6528                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6529                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
6530                IEM_MC_ARG(uint32_t *, pEFlags, 2);
6531                IEM_MC_LOCAL(uint32_t, u32Tmp);
6532
6533                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6534                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
6535                IEM_MC_REF_EFLAGS(pEFlags);
6536                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
6537                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
6538
6539                IEM_MC_ADVANCE_RIP();
6540                IEM_MC_END();
6541            }
6542            else
6543            {
6544                /* memory operand */
6545                IEM_MC_BEGIN(3, 2);
6546                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6547                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
6548                IEM_MC_ARG(uint32_t *, pEFlags, 2);
6549                IEM_MC_LOCAL(uint32_t, u32Tmp);
6550                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6551
6552                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
6553                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
6554                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
6555                IEM_MC_REF_EFLAGS(pEFlags);
6556                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
6557                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
6558
6559                IEM_MC_ADVANCE_RIP();
6560                IEM_MC_END();
6561            }
6562            return VINF_SUCCESS;
6563        }
6564
6565        case IEMMODE_64BIT:
6566        {
                 /* Immediate is 32 bits on the wire, sign-extended to 64. */
6567            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6568            IEMOP_HLP_NO_LOCK_PREFIX();
6569            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6570            {
6571                /* register operand */
6572                IEM_MC_BEGIN(3, 1);
6573                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6574                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
6575                IEM_MC_ARG(uint32_t *, pEFlags, 2);
6576                IEM_MC_LOCAL(uint64_t, u64Tmp);
6577
6578                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6579                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
6580                IEM_MC_REF_EFLAGS(pEFlags);
6581                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
6582                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
6583
6584                IEM_MC_ADVANCE_RIP();
6585                IEM_MC_END();
6586            }
6587            else
6588            {
6589                /* memory operand */
6590                IEM_MC_BEGIN(3, 2);
6591                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6592                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
6593                IEM_MC_ARG(uint32_t *, pEFlags, 2);
6594                IEM_MC_LOCAL(uint64_t, u64Tmp);
6595                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6596
6597                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
6598                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
6599                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
6600                IEM_MC_REF_EFLAGS(pEFlags);
6601                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
6602                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
6603
6604                IEM_MC_ADVANCE_RIP();
6605                IEM_MC_END();
6606            }
6607            return VINF_SUCCESS;
6608        }
6609    }
6610    AssertFailedReturn(VERR_INTERNAL_ERROR_3);
6611}
6612
6613
6614/** Opcode 0x6a. */
/*
 * PUSH Ib - push a sign-extended byte immediate at the effective operand
 * size.  The int8_t immediate sign-extends implicitly in the push macros.
 */
6615FNIEMOP_DEF(iemOp_push_Ib)
6616{
6617    IEMOP_MNEMONIC("push Ib");
6618    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
6619    IEMOP_HLP_NO_LOCK_PREFIX();
6620    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6621
6622    IEM_MC_BEGIN(0,0);
6623    switch (pIemCpu->enmEffOpSize)
6624    {
6625        case IEMMODE_16BIT:
6626            IEM_MC_PUSH_U16(i8Imm);
6627            break;
6628        case IEMMODE_32BIT:
6629            IEM_MC_PUSH_U32(i8Imm);
6630            break;
6631        case IEMMODE_64BIT:
6632            IEM_MC_PUSH_U64(i8Imm);
6633            break;
6634    }
6635    IEM_MC_ADVANCE_RIP();
6636    IEM_MC_END();
6637    return VINF_SUCCESS;
6638}
6639
6640
6641/** Opcode 0x6b. */
/*
 * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended byte
 * immediate.  Same structure as the 0x69 (Iz) form: multiply via the
 * iemAImpl_imul_two_uXX worker and store the truncated product into Gv.
 * SF/ZF/AF/PF are declared undefined for the verifier.
 */
6642FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
6643{
6644    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
6645    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6646    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6647    IEMOP_HLP_NO_LOCK_PREFIX();
6648    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6649
6650    switch (pIemCpu->enmEffOpSize)
6651    {
6652        case IEMMODE_16BIT:
6653            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6654            {
6655                /* register operand */
6656                IEM_MC_BEGIN(3, 1);
6657                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6658                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
6659                IEM_MC_ARG(uint32_t *, pEFlags, 2);
6660                IEM_MC_LOCAL(uint16_t, u16Tmp);
6661
6662                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6663                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
6664                IEM_MC_REF_EFLAGS(pEFlags);
6665                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
6666                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);
6667
6668                IEM_MC_ADVANCE_RIP();
6669                IEM_MC_END();
6670            }
6671            else
6672            {
6673                /* memory operand */
6674                IEM_MC_BEGIN(3, 2);
6675                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6676                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
6677                IEM_MC_ARG(uint32_t *, pEFlags, 2);
6678                IEM_MC_LOCAL(uint16_t, u16Tmp);
6679                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6680
6681                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
6682                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
6683                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
6684                IEM_MC_REF_EFLAGS(pEFlags);
6685                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
6686                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);
6687
6688                IEM_MC_ADVANCE_RIP();
6689                IEM_MC_END();
6690            }
6691            return VINF_SUCCESS;
6692
6693        case IEMMODE_32BIT:
6694            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6695            {
6696                /* register operand */
6697                IEM_MC_BEGIN(3, 1);
6698                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6699                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
6700                IEM_MC_ARG(uint32_t *, pEFlags, 2);
6701                IEM_MC_LOCAL(uint32_t, u32Tmp);
6702
6703                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6704                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
6705                IEM_MC_REF_EFLAGS(pEFlags);
6706                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
6707                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
6708
6709                IEM_MC_ADVANCE_RIP();
6710                IEM_MC_END();
6711            }
6712            else
6713            {
6714                /* memory operand */
6715                IEM_MC_BEGIN(3, 2);
6716                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6717                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
6718                IEM_MC_ARG(uint32_t *, pEFlags, 2);
6719                IEM_MC_LOCAL(uint32_t, u32Tmp);
6720                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6721
6722                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
6723                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
6724                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
6725                IEM_MC_REF_EFLAGS(pEFlags);
6726                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
6727                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
6728
6729                IEM_MC_ADVANCE_RIP();
6730                IEM_MC_END();
6731            }
6732            return VINF_SUCCESS;
6733
6734        case IEMMODE_64BIT:
6735            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6736            {
6737                /* register operand */
6738                IEM_MC_BEGIN(3, 1);
6739                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6740                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
6741                IEM_MC_ARG(uint32_t *, pEFlags, 2);
6742                IEM_MC_LOCAL(uint64_t, u64Tmp);
6743
6744                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6745                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
6746                IEM_MC_REF_EFLAGS(pEFlags);
6747                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
6748                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
6749
6750                IEM_MC_ADVANCE_RIP();
6751                IEM_MC_END();
6752            }
6753            else
6754            {
6755                /* memory operand */
6756                IEM_MC_BEGIN(3, 2);
6757                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6758                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
6759                IEM_MC_ARG(uint32_t *, pEFlags, 2);
6760                IEM_MC_LOCAL(uint64_t, u64Tmp);
6761                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6762
6763                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
6764                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
6765                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
6766                IEM_MC_REF_EFLAGS(pEFlags);
6767                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
6768                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
6769
6770                IEM_MC_ADVANCE_RIP();
6771                IEM_MC_END();
6772            }
6773            return VINF_SUCCESS;
6774    }
6775    AssertFailedReturn(VERR_INTERNAL_ERROR_3);
6776}
6777
6778
6779/** Opcode 0x6c. */
/*
 * INS/INSB - input byte(s) from port DX to ES:eDI.  Dispatches to a C
 * implementation selected by REP prefix and effective address size.
 */
6780FNIEMOP_DEF(iemOp_insb_Yb_DX)
6781{
6782    IEMOP_HLP_NO_LOCK_PREFIX();
         /* REPNZ is treated the same as REPZ here. */
6783    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6784    {
6785        IEMOP_MNEMONIC("rep ins Yb,DX");
6786        switch (pIemCpu->enmEffAddrMode)
6787        {
6788            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op8_addr16);
6789            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op8_addr32);
6790            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op8_addr64);
6791                IEM_NOT_REACHED_DEFAULT_CASE_RET();
6792        }
6793    }
6794    else
6795    {
6796        IEMOP_MNEMONIC("ins Yb,DX");
6797        switch (pIemCpu->enmEffAddrMode)
6798        {
6799            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op8_addr16);
6800            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op8_addr32);
6801            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op8_addr64);
6802                IEM_NOT_REACHED_DEFAULT_CASE_RET();
6803        }
6804    }
6805}
6806
6807
6808/** Opcode 0x6d. */
/*
 * INSW/INSD - input word/dword(s) from port DX to ES:eDI.  Dispatches to a C
 * implementation selected by REP prefix, operand size and address size; the
 * 64-bit operand size falls through to the 32-bit handlers (no 64-bit port
 * accesses).
 */
6809FNIEMOP_DEF(iemOp_inswd_Yv_DX)
6810{
6811    IEMOP_HLP_NO_LOCK_PREFIX();
6812    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
6813    {
6814        IEMOP_MNEMONIC("rep ins Yv,DX");
6815        switch (pIemCpu->enmEffOpSize)
6816        {
6817            case IEMMODE_16BIT:
6818                switch (pIemCpu->enmEffAddrMode)
6819                {
6820                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op16_addr16);
6821                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op16_addr32);
6822                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op16_addr64);
6823                        IEM_NOT_REACHED_DEFAULT_CASE_RET();
6824                }
6825                break;
6826            case IEMMODE_64BIT:
6827            case IEMMODE_32BIT:
6828                switch (pIemCpu->enmEffAddrMode)
6829                {
6830                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op32_addr16);
6831                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op32_addr32);
6832                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op32_addr64);
6833                        IEM_NOT_REACHED_DEFAULT_CASE_RET();
6834                }
6835                break;
6836            IEM_NOT_REACHED_DEFAULT_CASE_RET();
6837        }
6838    }
6839    else
6840    {
6841        IEMOP_MNEMONIC("ins Yv,DX");
6842        switch (pIemCpu->enmEffOpSize)
6843        {
6844            case IEMMODE_16BIT:
6845                switch (pIemCpu->enmEffAddrMode)
6846                {
6847                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op16_addr16);
6848                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op16_addr32);
6849                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op16_addr64);
6850                        IEM_NOT_REACHED_DEFAULT_CASE_RET();
6851                }
6852                break;
6853            case IEMMODE_64BIT:
6854            case IEMMODE_32BIT:
6855                switch (pIemCpu->enmEffAddrMode)
6856                {
6857                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op32_addr16);
6858                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op32_addr32);
6859                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op32_addr64);
6860                        IEM_NOT_REACHED_DEFAULT_CASE_RET();
6861                }
6862                break;
6863            IEM_NOT_REACHED_DEFAULT_CASE_RET();
6864        }
6865    }
6866}
6867
6868
6869/** Opcode 0x6e. */
/*
 * OUTS/OUTSB - output byte(s) from DS:eSI (segment overridable, hence the
 * iEffSeg argument) to port DX.  Dispatches to a C implementation selected
 * by REP prefix and effective address size.
 */
6870FNIEMOP_DEF(iemOp_outsb_Yb_DX)
6871{
6872    IEMOP_HLP_NO_LOCK_PREFIX();
         /* REPNZ is treated the same as REPZ here. */
6873    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6874    {
6875        IEMOP_MNEMONIC("rep out DX,Yb");
6876        switch (pIemCpu->enmEffAddrMode)
6877        {
6878            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg);
6879            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg);
6880            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg);
6881                IEM_NOT_REACHED_DEFAULT_CASE_RET();
6882        }
6883    }
6884    else
6885    {
6886        IEMOP_MNEMONIC("out DX,Yb");
6887        switch (pIemCpu->enmEffAddrMode)
6888        {
6889            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg);
6890            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg);
6891            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg);
6892                IEM_NOT_REACHED_DEFAULT_CASE_RET();
6893        }
6894    }
6895}
6896
6897
6898/** Opcode 0x6f. */
/*
 * OUTSW/OUTSD - output word/dword(s) from DS:eSI (segment overridable) to
 * port DX.  Dispatches to a C implementation selected by REP prefix, operand
 * size and address size; the 64-bit operand size falls through to the 32-bit
 * handlers (no 64-bit port accesses).
 */
6899FNIEMOP_DEF(iemOp_outswd_Yv_DX)
6900{
6901    IEMOP_HLP_NO_LOCK_PREFIX();
6902    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
6903    {
6904        IEMOP_MNEMONIC("rep outs DX,Yv");
6905        switch (pIemCpu->enmEffOpSize)
6906        {
6907            case IEMMODE_16BIT:
6908                switch (pIemCpu->enmEffAddrMode)
6909                {
6910                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg);
6911                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg);
6912                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg);
6913                        IEM_NOT_REACHED_DEFAULT_CASE_RET();
6914                }
6915                break;
6916            case IEMMODE_64BIT:
6917            case IEMMODE_32BIT:
6918                switch (pIemCpu->enmEffAddrMode)
6919                {
6920                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg);
6921                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg);
6922                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg);
6923                        IEM_NOT_REACHED_DEFAULT_CASE_RET();
6924                }
6925                break;
6926            IEM_NOT_REACHED_DEFAULT_CASE_RET();
6927        }
6928    }
6929    else
6930    {
6931        IEMOP_MNEMONIC("outs DX,Yv");
6932        switch (pIemCpu->enmEffOpSize)
6933        {
6934            case IEMMODE_16BIT:
6935                switch (pIemCpu->enmEffAddrMode)
6936                {
6937                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg);
6938                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg);
6939                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg);
6940                        IEM_NOT_REACHED_DEFAULT_CASE_RET();
6941                }
6942                break;
6943            case IEMMODE_64BIT:
6944            case IEMMODE_32BIT:
6945                switch (pIemCpu->enmEffAddrMode)
6946                {
6947                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg);
6948                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg);
6949                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg);
6950                        IEM_NOT_REACHED_DEFAULT_CASE_RET();
6951                }
6952                break;
6953            IEM_NOT_REACHED_DEFAULT_CASE_RET();
6954        }
6955    }
6956}
6957
6958
6959/** Opcode 0x70. */
/* Jcc Jb handlers (0x70..0x7f): fetch a signed byte displacement, force the
   64-bit default operand size, and either take the relative jump or just
   advance RIP depending on the tested EFLAGS condition.  The "not" variants
   simply swap the taken/not-taken branches.  JO: jump if OF=1. */
6960FNIEMOP_DEF(iemOp_jo_Jb)
6961{
6962    IEMOP_MNEMONIC("jo Jb");
6963    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
6964    IEMOP_HLP_NO_LOCK_PREFIX();
6965    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6966
6967    IEM_MC_BEGIN(0, 0);
6968    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6969        IEM_MC_REL_JMP_S8(i8Imm);
6970    } IEM_MC_ELSE() {
6971        IEM_MC_ADVANCE_RIP();
6972    } IEM_MC_ENDIF();
6973    IEM_MC_END();
6974    return VINF_SUCCESS;
6975}
6976
6977
6978/** Opcode 0x71. */
/* JNO: jump if OF=0. */
6979FNIEMOP_DEF(iemOp_jno_Jb)
6980{
6981    IEMOP_MNEMONIC("jno Jb");
6982    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
6983    IEMOP_HLP_NO_LOCK_PREFIX();
6984    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6985
6986    IEM_MC_BEGIN(0, 0);
6987    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6988        IEM_MC_ADVANCE_RIP();
6989    } IEM_MC_ELSE() {
6990        IEM_MC_REL_JMP_S8(i8Imm);
6991    } IEM_MC_ENDIF();
6992    IEM_MC_END();
6993    return VINF_SUCCESS;
6994}
6995
6996/** Opcode 0x72. */
/* JC/JB/JNAE: jump if CF=1. */
6997FNIEMOP_DEF(iemOp_jc_Jb)
6998{
6999    IEMOP_MNEMONIC("jc/jnae Jb");
7000    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
7001    IEMOP_HLP_NO_LOCK_PREFIX();
7002    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7003
7004    IEM_MC_BEGIN(0, 0);
7005    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7006        IEM_MC_REL_JMP_S8(i8Imm);
7007    } IEM_MC_ELSE() {
7008        IEM_MC_ADVANCE_RIP();
7009    } IEM_MC_ENDIF();
7010    IEM_MC_END();
7011    return VINF_SUCCESS;
7012}
7013
7014
7015/** Opcode 0x73. */
/* JNC/JNB/JAE: jump if CF=0. */
7016FNIEMOP_DEF(iemOp_jnc_Jb)
7017{
7018    IEMOP_MNEMONIC("jnc/jnb Jb");
7019    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
7020    IEMOP_HLP_NO_LOCK_PREFIX();
7021    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7022
7023    IEM_MC_BEGIN(0, 0);
7024    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7025        IEM_MC_ADVANCE_RIP();
7026    } IEM_MC_ELSE() {
7027        IEM_MC_REL_JMP_S8(i8Imm);
7028    } IEM_MC_ENDIF();
7029    IEM_MC_END();
7030    return VINF_SUCCESS;
7031}
7032
7033
7034/** Opcode 0x74. */
/* JE/JZ: jump if ZF=1. */
7035FNIEMOP_DEF(iemOp_je_Jb)
7036{
7037    IEMOP_MNEMONIC("je/jz Jb");
7038    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
7039    IEMOP_HLP_NO_LOCK_PREFIX();
7040    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7041
7042    IEM_MC_BEGIN(0, 0);
7043    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7044        IEM_MC_REL_JMP_S8(i8Imm);
7045    } IEM_MC_ELSE() {
7046        IEM_MC_ADVANCE_RIP();
7047    } IEM_MC_ENDIF();
7048    IEM_MC_END();
7049    return VINF_SUCCESS;
7050}
7051
7052
7053/** Opcode 0x75. */
/* JNE/JNZ: jump if ZF=0. */
7054FNIEMOP_DEF(iemOp_jne_Jb)
7055{
7056    IEMOP_MNEMONIC("jne/jnz Jb");
7057    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
7058    IEMOP_HLP_NO_LOCK_PREFIX();
7059    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7060
7061    IEM_MC_BEGIN(0, 0);
7062    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7063        IEM_MC_ADVANCE_RIP();
7064    } IEM_MC_ELSE() {
7065        IEM_MC_REL_JMP_S8(i8Imm);
7066    } IEM_MC_ENDIF();
7067    IEM_MC_END();
7068    return VINF_SUCCESS;
7069}
7070
7071
7072/** Opcode 0x76. */
/* JBE/JNA: jump if CF=1 or ZF=1. */
7073FNIEMOP_DEF(iemOp_jbe_Jb)
7074{
7075    IEMOP_MNEMONIC("jbe/jna Jb");
7076    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
7077    IEMOP_HLP_NO_LOCK_PREFIX();
7078    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7079
7080    IEM_MC_BEGIN(0, 0);
7081    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7082        IEM_MC_REL_JMP_S8(i8Imm);
7083    } IEM_MC_ELSE() {
7084        IEM_MC_ADVANCE_RIP();
7085    } IEM_MC_ENDIF();
7086    IEM_MC_END();
7087    return VINF_SUCCESS;
7088}
7089
7090
7091/** Opcode 0x77. */
/* JNBE/JA: jump if CF=0 and ZF=0. */
7092FNIEMOP_DEF(iemOp_jnbe_Jb)
7093{
7094    IEMOP_MNEMONIC("jnbe/ja Jb");
7095    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
7096    IEMOP_HLP_NO_LOCK_PREFIX();
7097    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7098
7099    IEM_MC_BEGIN(0, 0);
7100    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7101        IEM_MC_ADVANCE_RIP();
7102    } IEM_MC_ELSE() {
7103        IEM_MC_REL_JMP_S8(i8Imm);
7104    } IEM_MC_ENDIF();
7105    IEM_MC_END();
7106    return VINF_SUCCESS;
7107}
7108
7109
7110/** Opcode 0x78. */
/* JS: jump if SF=1. */
7111FNIEMOP_DEF(iemOp_js_Jb)
7112{
7113    IEMOP_MNEMONIC("js Jb");
7114    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
7115    IEMOP_HLP_NO_LOCK_PREFIX();
7116    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7117
7118    IEM_MC_BEGIN(0, 0);
7119    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7120        IEM_MC_REL_JMP_S8(i8Imm);
7121    } IEM_MC_ELSE() {
7122        IEM_MC_ADVANCE_RIP();
7123    } IEM_MC_ENDIF();
7124    IEM_MC_END();
7125    return VINF_SUCCESS;
7126}
7127
7128
7129/** Opcode 0x79. */
/* JNS: jump if SF=0. */
7130FNIEMOP_DEF(iemOp_jns_Jb)
7131{
7132    IEMOP_MNEMONIC("jns Jb");
7133    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
7134    IEMOP_HLP_NO_LOCK_PREFIX();
7135    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7136
7137    IEM_MC_BEGIN(0, 0);
7138    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7139        IEM_MC_ADVANCE_RIP();
7140    } IEM_MC_ELSE() {
7141        IEM_MC_REL_JMP_S8(i8Imm);
7142    } IEM_MC_ENDIF();
7143    IEM_MC_END();
7144    return VINF_SUCCESS;
7145}
7146
7147
7148/** Opcode 0x7a. */
/* JP/JPE: jump if PF=1. */
7149FNIEMOP_DEF(iemOp_jp_Jb)
7150{
7151    IEMOP_MNEMONIC("jp Jb");
7152    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
7153    IEMOP_HLP_NO_LOCK_PREFIX();
7154    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7155
7156    IEM_MC_BEGIN(0, 0);
7157    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7158        IEM_MC_REL_JMP_S8(i8Imm);
7159    } IEM_MC_ELSE() {
7160        IEM_MC_ADVANCE_RIP();
7161    } IEM_MC_ENDIF();
7162    IEM_MC_END();
7163    return VINF_SUCCESS;
7164}
7165
7166
7167/** Opcode 0x7b. */
/* JNP/JPO: jump if PF=0. */
7168FNIEMOP_DEF(iemOp_jnp_Jb)
7169{
7170    IEMOP_MNEMONIC("jnp Jb");
7171    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
7172    IEMOP_HLP_NO_LOCK_PREFIX();
7173    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7174
7175    IEM_MC_BEGIN(0, 0);
7176    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7177        IEM_MC_ADVANCE_RIP();
7178    } IEM_MC_ELSE() {
7179        IEM_MC_REL_JMP_S8(i8Imm);
7180    } IEM_MC_ENDIF();
7181    IEM_MC_END();
7182    return VINF_SUCCESS;
7183}
7184
7185
7186/** Opcode 0x7c. */
7187FNIEMOP_DEF(iemOp_jl_Jb)
7188{
7189 IEMOP_MNEMONIC("jl/jnge Jb");
7190 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
7191 IEMOP_HLP_NO_LOCK_PREFIX();
7192 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7193
7194 IEM_MC_BEGIN(0, 0);
7195 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7196 IEM_MC_REL_JMP_S8(i8Imm);
7197 } IEM_MC_ELSE() {
7198 IEM_MC_ADVANCE_RIP();
7199 } IEM_MC_ENDIF();
7200 IEM_MC_END();
7201 return VINF_SUCCESS;
7202}
7203
7204
/**
 * Opcode 0x7d - jnl/jge Jb.
 * Jump short if not less (signed): SF == OF; inverse of 0x7c.
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Same SF != OF test as JL, with the branches swapped. */
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7222
7223
/**
 * Opcode 0x7e - jle/jng Jb.
 * Jump short if less or equal (signed): ZF=1 or SF != OF.
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7241
7242
/**
 * Opcode 0x7f - jnle/jg Jb.
 * Jump short if greater (signed): ZF=0 and SF == OF; inverse of 0x7e.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Same test as JLE, with the branches swapped. */
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7260
7261
/**
 * Opcode 0x80 - group 1, Eb,Ib.
 * ADD/OR/ADC/SBB/AND/SUB/XOR/CMP on a byte register or memory operand with an
 * 8-bit immediate.  The ModR/M reg field selects the operation via
 * g_apIemImplGrp1.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mnemonic table packs each name into a 4-byte slot, indexed by the reg field. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - the only group-1 op without a locked variant; it never writes. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        /* The immediate is fetched after the ModR/M displacement, matching the instruction layout. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7320
7321
/**
 * Opcode 0x81 - group 1, Ev,Iz.
 * ADD/OR/ADC/SBB/AND/SUB/XOR/CMP on a word/dword/qword operand with a
 * word/dword immediate (sign-extended dword in 64-bit mode).  One case per
 * effective operand size, each with a register and a memory path.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mnemonic table packs each name into a 4-byte slot, indexed by the reg field. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP - the only group-1 op without a locked variant; it never writes. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* Immediate is fetched after the ModR/M displacement, matching the instruction layout. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP - the only group-1 op without a locked variant; it never writes. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* 64-bit mode has no 64-bit immediate here: a 32-bit immediate is sign-extended. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP - the only group-1 op without a locked variant; it never writes. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
7495
7496
/**
 * Opcode 0x82 - group 1, Eb,Ib (alias).
 * Legacy alias of opcode 0x80, valid only outside 64-bit mode; delegates to
 * the 0x80 handler after the mode check.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
7503
7504
/**
 * Opcode 0x83 - group 1, Ev,Ib.
 * ADD/OR/ADC/SBB/AND/SUB/XOR/CMP on a word/dword/qword operand with an
 * 8-bit immediate that is sign-extended to the operand size.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mnemonic table packs each name into a 4-byte slot, indexed by the reg field. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* (int8_t) cast sign-extends the byte immediate to the operand size. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* Checking the 16-bit locked worker suffices: either all sizes have locked
           variants or (CMP) none do. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - the only group-1 op without a locked variant; it never writes. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* Immediate follows the ModR/M displacement; sign-extended via (int8_t). */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
7664
7665
/**
 * Opcode 0x84 - test Eb,Gb.
 * Byte TEST; reuses the generic rm,r8 binary-operator worker with the TEST
 * implementation table.  AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
7674
7675
/**
 * Opcode 0x85 - test Ev,Gv.
 * Word/dword/qword TEST; reuses the generic rm,rv binary-operator worker with
 * the TEST implementation table.  AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
7684
7685
/**
 * Opcode 0x86 - xchg Eb,Gb.
 * Exchanges a byte register with another byte register or with memory.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Register/register: fetch both, then store them crosswise. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  The memory operand is mapped read-write and
         * the exchange is done by the assembly worker.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7733
7734
/**
 * Opcode 0x87 - xchg Ev,Gv.
 * Exchanges a word/dword/qword register with another register or with memory.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Register/register: fetch both, then store them crosswise. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  The memory operand is mapped read-write
         * and the exchange is done by the assembly worker.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7855
7856
/**
 * Opcode 0x88 - mov Eb,Gb.
 * Moves a byte register to another byte register or to memory.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
7895
7896
/**
 * Opcode 0x89 - mov Ev,Gv.
 * Moves a word/dword/qword register to another register or to memory.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
7983
7984
/**
 * Opcode 0x8a - mov Gb,Eb.
 * Moves a byte register or memory operand into a byte register.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8021
8022
/**
 * Opcode 0x8b - mov Gv,Ev.
 * Moves a word/dword/qword register or memory operand into a register.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
8109
8110
/**
 * Opcode 0x8c - mov Ev,Sw.
 * Stores a segment register into a general register (operand-size respected,
 * upper bits cleared) or into memory (always a 16-bit store).
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if ( iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero-extended fetch */
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero-extended fetch */
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8183
8184
8185
8186
/** Opcode 0x8d. */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    /* LEA Gv,M: compute the effective address of the memory operand and store
       it (truncated/zero-extended to the effective operand size) in the
       destination general register.  No memory is actually accessed. */
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    /* Mod=3 (register operand) is invalid for LEA.
       NOTE(review): this raises via the lock-prefix macro even though the
       cause is an invalid encoding - confirm both macros raise the same \#UD
       before renaming. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_LOCK_PREFIX(); /* no register form */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
            /* 16-bit operand size: only the low word of the address is kept. */
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
            /* 32-bit operand size: keep the low dword of the address. */
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
            /* 64-bit operand size: the full address is stored as-is. */
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_5);
}
8231
8232
/** Opcode 0x8e. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    /* MOV Sw,Ev: load a segment register from a 16-bit register or memory
       operand.  The heavy lifting (descriptor checks etc.) is deferred to
       iemCImpl_load_SReg. */
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    /* CS cannot be loaded with MOV, and reg values above GS do not name a
       segment register - both raise \#UD. */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8286
8287
8288/** Opcode 0x8f /0. */
8289FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
8290{
8291 /* This bugger is rather annoying as it requires rSP to be updated before
8292 doing the effective address calculations. Will eventually require a
8293 split between the R/M+SIB decoding and the effective address
8294 calculation - which is something that is required for any attempt at
8295 reusing this code for a recompiler. It may also be good to have if we
8296 need to delay #UD exception caused by invalid lock prefixes.
8297
8298 For now, we'll do a mostly safe interpreter-only implementation here. */
8299 /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
8300 * now until tests show it's checked.. */
8301 IEMOP_MNEMONIC("pop Ev");
8302 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
8303
8304 /* Register access is relatively easy and can share code. */
8305 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8306 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8307
8308 /*
8309 * Memory target.
8310 *
8311 * Intel says that RSP is incremented before it's used in any effective
8312 * address calcuations. This means some serious extra annoyance here since
8313 * we decode and calculate the effective address in one step and like to
8314 * delay committing registers till everything is done.
8315 *
8316 * So, we'll decode and calculate the effective address twice. This will
8317 * require some recoding if turned into a recompiler.
8318 */
8319 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
8320
8321#ifndef TST_IEM_CHECK_MC
8322 /* Calc effective address with modified ESP. */
8323 uint8_t const offOpcodeSaved = pIemCpu->offOpcode;
8324 RTGCPTR GCPtrEff;
8325 VBOXSTRICTRC rcStrict;
8326 rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, &GCPtrEff);
8327 if (rcStrict != VINF_SUCCESS)
8328 return rcStrict;
8329 pIemCpu->offOpcode = offOpcodeSaved;
8330
8331 PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
8332 uint64_t const RspSaved = pCtx->rsp;
8333 switch (pIemCpu->enmEffOpSize)
8334 {
8335 case IEMMODE_16BIT: iemRegAddToRsp(pCtx, 2); break;
8336 case IEMMODE_32BIT: iemRegAddToRsp(pCtx, 4); break;
8337 case IEMMODE_64BIT: iemRegAddToRsp(pCtx, 8); break;
8338 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8339 }
8340 rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, &GCPtrEff);
8341 Assert(rcStrict == VINF_SUCCESS);
8342 pCtx->rsp = RspSaved;
8343
8344 /* Perform the operation - this should be CImpl. */
8345 RTUINT64U TmpRsp;
8346 TmpRsp.u = pCtx->rsp;
8347 switch (pIemCpu->enmEffOpSize)
8348 {
8349 case IEMMODE_16BIT:
8350 {
8351 uint16_t u16Value;
8352 rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
8353 if (rcStrict == VINF_SUCCESS)
8354 rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
8355 break;
8356 }
8357
8358 case IEMMODE_32BIT:
8359 {
8360 uint32_t u32Value;
8361 rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
8362 if (rcStrict == VINF_SUCCESS)
8363 rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
8364 break;
8365 }
8366
8367 case IEMMODE_64BIT:
8368 {
8369 uint64_t u64Value;
8370 rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
8371 if (rcStrict == VINF_SUCCESS)
8372 rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
8373 break;
8374 }
8375
8376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8377 }
8378 if (rcStrict == VINF_SUCCESS)
8379 {
8380 pCtx->rsp = TmpRsp.u;
8381 iemRegUpdateRip(pIemCpu);
8382 }
8383 return rcStrict;
8384
8385#else
8386 return VERR_IEM_IPE_2;
8387#endif
8388}
8389
8390
8391/** Opcode 0x8f. */
8392FNIEMOP_DEF(iemOp_Grp1A)
8393{
8394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8395 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only pop Ev in this group. */
8396 return IEMOP_RAISE_INVALID_OPCODE();
8397 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
8398}
8399
8400
8401/**
8402 * Common 'xchg reg,rAX' helper.
8403 */
8404FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
8405{
8406 IEMOP_HLP_NO_LOCK_PREFIX();
8407
8408 iReg |= pIemCpu->uRexB;
8409 switch (pIemCpu->enmEffOpSize)
8410 {
8411 case IEMMODE_16BIT:
8412 IEM_MC_BEGIN(0, 2);
8413 IEM_MC_LOCAL(uint16_t, u16Tmp1);
8414 IEM_MC_LOCAL(uint16_t, u16Tmp2);
8415 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
8416 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
8417 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
8418 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
8419 IEM_MC_ADVANCE_RIP();
8420 IEM_MC_END();
8421 return VINF_SUCCESS;
8422
8423 case IEMMODE_32BIT:
8424 IEM_MC_BEGIN(0, 2);
8425 IEM_MC_LOCAL(uint32_t, u32Tmp1);
8426 IEM_MC_LOCAL(uint32_t, u32Tmp2);
8427 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
8428 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
8429 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
8430 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
8431 IEM_MC_ADVANCE_RIP();
8432 IEM_MC_END();
8433 return VINF_SUCCESS;
8434
8435 case IEMMODE_64BIT:
8436 IEM_MC_BEGIN(0, 2);
8437 IEM_MC_LOCAL(uint64_t, u64Tmp1);
8438 IEM_MC_LOCAL(uint64_t, u64Tmp2);
8439 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
8440 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
8441 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
8442 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
8443 IEM_MC_ADVANCE_RIP();
8444 IEM_MC_END();
8445 return VINF_SUCCESS;
8446
8447 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8448 }
8449}
8450
8451
8452/** Opcode 0x90. */
8453FNIEMOP_DEF(iemOp_nop)
8454{
8455 /* R8/R8D and RAX/EAX can be exchanged. */
8456 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
8457 {
8458 IEMOP_MNEMONIC("xchg r8,rAX");
8459 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
8460 }
8461
8462 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
8463 IEMOP_MNEMONIC("pause");
8464 else
8465 IEMOP_MNEMONIC("nop");
8466 IEM_MC_BEGIN(0, 0);
8467 IEM_MC_ADVANCE_RIP();
8468 IEM_MC_END();
8469 return VINF_SUCCESS;
8470}
8471
8472
8473/** Opcode 0x91. */
8474FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
8475{
8476 IEMOP_MNEMONIC("xchg rCX,rAX");
8477 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
8478}
8479
8480
8481/** Opcode 0x92. */
8482FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
8483{
8484 IEMOP_MNEMONIC("xchg rDX,rAX");
8485 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
8486}
8487
8488
8489/** Opcode 0x93. */
8490FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
8491{
8492 IEMOP_MNEMONIC("xchg rBX,rAX");
8493 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
8494}
8495
8496
8497/** Opcode 0x94. */
8498FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
8499{
8500 IEMOP_MNEMONIC("xchg rSX,rAX");
8501 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
8502}
8503
8504
8505/** Opcode 0x95. */
8506FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
8507{
8508 IEMOP_MNEMONIC("xchg rBP,rAX");
8509 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
8510}
8511
8512
8513/** Opcode 0x96. */
8514FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
8515{
8516 IEMOP_MNEMONIC("xchg rSI,rAX");
8517 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
8518}
8519
8520
8521/** Opcode 0x97. */
8522FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
8523{
8524 IEMOP_MNEMONIC("xchg rDI,rAX");
8525 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
8526}
8527
8528
/** Opcode 0x98. */
FNIEMOP_DEF(iemOp_cbw)
{
    /* CBW/CWDE/CDQE: sign extend AL->AX, AX->EAX or EAX->RAX depending on the
       effective operand size.  Implemented by testing the sign bit and then
       OR-ing in or AND-ing away the upper half of rAX. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            /* Sign bit of AL decides the value of AH. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            /* Sign bit of AX decides the top word of EAX. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            /* Sign bit of EAX decides the top dword of RAX. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8574
8575
/** Opcode 0x99. */
FNIEMOP_DEF(iemOp_cwd)
{
    /* CWD/CDQ/CQO: sign extend rAX into rDX:rAX by filling rDX with all ones
       or all zeros depending on the sign bit of rAX at the effective operand
       size.  rAX itself is not modified. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8621
8622
/** Opcode 0x9a. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    /* Far CALL with an immediate seg:offset pointer.  Invalid in 64-bit mode
       (IEMOP_HLP_NO_64BIT below); the actual far-call semantics live in
       iemCImpl_callf. */
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    /* The offset is 16 or 32 bits wide by operand size; it is read before the
       16-bit selector, matching the instruction's encoding order. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
8639
8640
/** Opcode 0x9b. (aka fwait) */
FNIEMOP_DEF(iemOp_wait)
{
    /* WAIT/FWAIT: no state change by itself; only checks for pending FPU
       exceptions / device-not-available conditions and raises accordingly. */
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8654
8655
/** Opcode 0x9c. */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    /* PUSHF: defer to the C implementation, which handles the IOPL/VM86
       checks.  Default 64-bit operand size applies in long mode. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
}


/** Opcode 0x9d. */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    /* POPF: defer to the C implementation; mirrors pushf above. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
}
8672
8673
/** Opcode 0x9e. */
FNIEMOP_DEF(iemOp_sahf)
{
    /* SAHF: store AH into the low byte of EFLAGS (SF/ZF/AF/PF/CF only).
       In 64-bit mode this requires the LAHF/SAHF CPUID feature bit. */
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_EXT_FEATURE_ECX_LAHF_SAHF))
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the arithmetic flags from AH, merge them into the preserved
       upper EFLAGS bits, and force the always-one reserved bit. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8696
8697
/** Opcode 0x9f. */
FNIEMOP_DEF(iemOp_lahf)
{
    /* LAHF: load the low byte of EFLAGS into AH.  Like SAHF, gated on the
       LAHF/SAHF CPUID feature bit in 64-bit mode. */
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_EXT_FEATURE_ECX_LAHF_SAHF))
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8714
8715
8716/**
8717 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
8718 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
8719 * prefixes. Will return on failures.
8720 * @param a_GCPtrMemOff The variable to store the offset in.
8721 */
8722#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
8723 do \
8724 { \
8725 switch (pIemCpu->enmEffAddrMode) \
8726 { \
8727 case IEMMODE_16BIT: \
8728 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
8729 break; \
8730 case IEMMODE_32BIT: \
8731 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
8732 break; \
8733 case IEMMODE_64BIT: \
8734 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
8735 break; \
8736 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8737 } \
8738 IEMOP_HLP_NO_LOCK_PREFIX(); \
8739 } while (0)
8740
8741/** Opcode 0xa0. */
8742FNIEMOP_DEF(iemOp_mov_Al_Ob)
8743{
8744 /*
8745 * Get the offset and fend of lock prefixes.
8746 */
8747 RTGCPTR GCPtrMemOff;
8748 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
8749
8750 /*
8751 * Fetch AL.
8752 */
8753 IEM_MC_BEGIN(0,1);
8754 IEM_MC_LOCAL(uint8_t, u8Tmp);
8755 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
8756 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8757 IEM_MC_ADVANCE_RIP();
8758 IEM_MC_END();
8759 return VINF_SUCCESS;
8760}
8761
8762
/** Opcode 0xa1. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    /* Load AX/EAX/RAX from [iEffSeg:moffs] at the effective operand size. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8808
8809
8810/** Opcode 0xa2. */
8811FNIEMOP_DEF(iemOp_mov_Ob_AL)
8812{
8813 /*
8814 * Get the offset and fend of lock prefixes.
8815 */
8816 RTGCPTR GCPtrMemOff;
8817 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
8818
8819 /*
8820 * Store AL.
8821 */
8822 IEM_MC_BEGIN(0,1);
8823 IEM_MC_LOCAL(uint8_t, u8Tmp);
8824 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
8825 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
8826 IEM_MC_ADVANCE_RIP();
8827 IEM_MC_END();
8828 return VINF_SUCCESS;
8829}
8830
8831
8832/** Opcode 0xa3. */
8833FNIEMOP_DEF(iemOp_mov_Ov_rAX)
8834{
8835 /*
8836 * Get the offset and fend of lock prefixes.
8837 */
8838 RTGCPTR GCPtrMemOff;
8839 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
8840
8841 /*
8842 * Store rAX.
8843 */
8844 switch (pIemCpu->enmEffOpSize)
8845 {
8846 case IEMMODE_16BIT:
8847 IEM_MC_BEGIN(0,1);
8848 IEM_MC_LOCAL(uint16_t, u16Tmp);
8849 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
8850 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
8851 IEM_MC_ADVANCE_RIP();
8852 IEM_MC_END();
8853 return VINF_SUCCESS;
8854
8855 case IEMMODE_32BIT:
8856 IEM_MC_BEGIN(0,1);
8857 IEM_MC_LOCAL(uint32_t, u32Tmp);
8858 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
8859 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
8860 IEM_MC_ADVANCE_RIP();
8861 IEM_MC_END();
8862 return VINF_SUCCESS;
8863
8864 case IEMMODE_64BIT:
8865 IEM_MC_BEGIN(0,1);
8866 IEM_MC_LOCAL(uint64_t, u64Tmp);
8867 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
8868 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
8869 IEM_MC_ADVANCE_RIP();
8870 IEM_MC_END();
8871 return VINF_SUCCESS;
8872
8873 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8874 }
8875}
8876
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Implements one MOVS step: load ValBits bits from [iEffSeg:rSI], store them
 * to [ES:rDI], then advance (or, with EFLAGS.DF set, retreat) rSI and rDI by
 * the operand byte count.  Source/destination pointers are read at AddrBits
 * width and zero extended to 64 bits. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
8895
/** Opcode 0xa4. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    /* MOVSB: a single step is emitted inline via IEM_MOVS_CASE; REP forms are
       deferred to the C implementations, one per address size. */
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    /* For a byte move REP and REPNZ/REPZ behave identically, hence one test. */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
8929
8930
/** Opcode 0xa5. */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    /* MOVSW/MOVSD/MOVSQ: double switch on operand and address size.  REP forms
       defer to C implementations; the non-REP forms expand IEM_MOVS_CASE. */
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movs Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* no break above: unreachable, every inner case returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /* 64-bit op with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9013
9014#undef IEM_MOVS_CASE
9015
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Implements one CMPS step: read ValBits bits from both [iEffSeg:rSI] and
 * [ES:rDI], compare them via the cmp assembly worker (updating EFLAGS only,
 * no memory writes), then advance/retreat rSI and rDI per EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
9042
9043/** Opcode 0xa6. */
9044FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
9045{
9046 IEMOP_HLP_NO_LOCK_PREFIX();
9047
9048 /*
9049 * Use the C implementation if a repeat prefix is encountered.
9050 */
9051 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
9052 {
9053 IEMOP_MNEMONIC("repe cmps Xb,Yb");
9054 switch (pIemCpu->enmEffAddrMode)
9055 {
9056 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
9057 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
9058 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
9059 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9060 }
9061 }
9062 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
9063 {
9064 IEMOP_MNEMONIC("repe cmps Xb,Yb");
9065 switch (pIemCpu->enmEffAddrMode)
9066 {
9067 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
9068 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
9069 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
9070 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9071 }
9072 }
9073 IEMOP_MNEMONIC("cmps Xb,Yb");
9074
9075 /*
9076 * Sharing case implementation with cmps[wdq] below.
9077 */
9078 switch (pIemCpu->enmEffAddrMode)
9079 {
9080 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
9081 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
9082 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
9083 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9084 }
9085 return VINF_SUCCESS;
9086
9087}
9088
9089
/** Opcode 0xa7. */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    /* CMPSW/CMPSD/CMPSQ: double switch on operand and address size.  REPE and
       REPNE defer to their respective C implementations; the non-REP forms
       expand IEM_CMPS_CASE. */
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* no break above: unreachable, every inner case returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /* 64-bit op with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* no break above: unreachable, every inner case returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /* 64-bit op with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
9208
9209#undef IEM_CMPS_CASE
9210
/** Opcode 0xa8 - test AL,Ib. AND of AL and imm8, discarding the result and
 *  setting only the flags; AF is architecturally undefined for TEST. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
9218
9219
/** Opcode 0xa9 - test rAX,Iz. Operand-size dependent TEST of AX/EAX/RAX
 *  against the immediate; AF is architecturally undefined for TEST. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
9227
9228
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
/* Emits the non-repeating STOS body for one (operand size, address size)
   combination: store the low ValBits of xAX to ES:xDI, then step xDI by
   ValBits/8 in the direction given by EFLAGS.DF (DF=1 decrements). */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \

/** Opcode 0xaa - stosb Yb,AL. */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE are treated identically for STOS.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9278
9279
9280/** Opcode 0xab. */
9281FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
9282{
9283 IEMOP_HLP_NO_LOCK_PREFIX();
9284
9285 /*
9286 * Use the C implementation if a repeat prefix is encountered.
9287 */
9288 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
9289 {
9290 IEMOP_MNEMONIC("rep stos Yv,rAX");
9291 switch (pIemCpu->enmEffOpSize)
9292 {
9293 case IEMMODE_16BIT:
9294 switch (pIemCpu->enmEffAddrMode)
9295 {
9296 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
9297 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
9298 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
9299 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9300 }
9301 break;
9302 case IEMMODE_32BIT:
9303 switch (pIemCpu->enmEffAddrMode)
9304 {
9305 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
9306 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
9307 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
9308 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9309 }
9310 case IEMMODE_64BIT:
9311 switch (pIemCpu->enmEffAddrMode)
9312 {
9313 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
9314 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
9315 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
9316 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9317 }
9318 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9319 }
9320 }
9321 IEMOP_MNEMONIC("stos Yv,rAX");
9322
9323 /*
9324 * Annoying double switch here.
9325 * Using ugly macro for implementing the cases, sharing it with stosb.
9326 */
9327 switch (pIemCpu->enmEffOpSize)
9328 {
9329 case IEMMODE_16BIT:
9330 switch (pIemCpu->enmEffAddrMode)
9331 {
9332 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
9333 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
9334 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
9335 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9336 }
9337 break;
9338
9339 case IEMMODE_32BIT:
9340 switch (pIemCpu->enmEffAddrMode)
9341 {
9342 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
9343 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
9344 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
9345 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9346 }
9347 break;
9348
9349 case IEMMODE_64BIT:
9350 switch (pIemCpu->enmEffAddrMode)
9351 {
9352 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
9353 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
9354 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
9355 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9356 }
9357 break;
9358 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9359 }
9360 return VINF_SUCCESS;
9361}
9362
9363#undef IEM_STOS_CASE
9364
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
/* Emits the non-repeating LODS body for one (operand size, address size)
   combination: load ValBits from iEffSeg:xSI into the low part of xAX,
   then step xSI by ValBits/8 in the direction given by EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
9380
/** Opcode 0xac - lodsb AL,Xb. */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE are treated identically for LODS.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9414
9415
9416/** Opcode 0xad. */
9417FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
9418{
9419 IEMOP_HLP_NO_LOCK_PREFIX();
9420
9421 /*
9422 * Use the C implementation if a repeat prefix is encountered.
9423 */
9424 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
9425 {
9426 IEMOP_MNEMONIC("rep lods rAX,Xv");
9427 switch (pIemCpu->enmEffOpSize)
9428 {
9429 case IEMMODE_16BIT:
9430 switch (pIemCpu->enmEffAddrMode)
9431 {
9432 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
9433 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
9434 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
9435 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9436 }
9437 break;
9438 case IEMMODE_32BIT:
9439 switch (pIemCpu->enmEffAddrMode)
9440 {
9441 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
9442 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
9443 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
9444 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9445 }
9446 case IEMMODE_64BIT:
9447 switch (pIemCpu->enmEffAddrMode)
9448 {
9449 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
9450 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
9451 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
9452 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9453 }
9454 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9455 }
9456 }
9457 IEMOP_MNEMONIC("lods rAX,Xv");
9458
9459 /*
9460 * Annoying double switch here.
9461 * Using ugly macro for implementing the cases, sharing it with lodsb.
9462 */
9463 switch (pIemCpu->enmEffOpSize)
9464 {
9465 case IEMMODE_16BIT:
9466 switch (pIemCpu->enmEffAddrMode)
9467 {
9468 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
9469 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
9470 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
9471 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9472 }
9473 break;
9474
9475 case IEMMODE_32BIT:
9476 switch (pIemCpu->enmEffAddrMode)
9477 {
9478 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
9479 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
9480 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
9481 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9482 }
9483 break;
9484
9485 case IEMMODE_64BIT:
9486 switch (pIemCpu->enmEffAddrMode)
9487 {
9488 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
9489 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
9490 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
9491 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9492 }
9493 break;
9494 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9495 }
9496 return VINF_SUCCESS;
9497}
9498
9499#undef IEM_LODS_CASE
9500
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
/* Emits the non-repeating SCAS body for one (operand size, address size)
   combination: compare the low ValBits of xAX with ES:xDI (CMP semantics,
   flags only, xAX unchanged), then step xDI by ValBits/8 per EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
9522
/** Opcode 0xae - scasb AL,Xb. Unlike STOS/LODS, REPE and REPNE have distinct
 *  semantics here (repeat while ZF set vs. clear). */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9567
9568
9569/** Opcode 0xaf. */
9570FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
9571{
9572 IEMOP_HLP_NO_LOCK_PREFIX();
9573
9574 /*
9575 * Use the C implementation if a repeat prefix is encountered.
9576 */
9577 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
9578 {
9579 IEMOP_MNEMONIC("repe scas rAX,Xv");
9580 switch (pIemCpu->enmEffOpSize)
9581 {
9582 case IEMMODE_16BIT:
9583 switch (pIemCpu->enmEffAddrMode)
9584 {
9585 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
9586 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
9587 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
9588 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9589 }
9590 break;
9591 case IEMMODE_32BIT:
9592 switch (pIemCpu->enmEffAddrMode)
9593 {
9594 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
9595 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
9596 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
9597 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9598 }
9599 case IEMMODE_64BIT:
9600 switch (pIemCpu->enmEffAddrMode)
9601 {
9602 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
9603 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
9604 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
9605 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9606 }
9607 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9608 }
9609 }
9610 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
9611 {
9612 IEMOP_MNEMONIC("repne scas rAX,Xv");
9613 switch (pIemCpu->enmEffOpSize)
9614 {
9615 case IEMMODE_16BIT:
9616 switch (pIemCpu->enmEffAddrMode)
9617 {
9618 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
9619 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
9620 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
9621 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9622 }
9623 break;
9624 case IEMMODE_32BIT:
9625 switch (pIemCpu->enmEffAddrMode)
9626 {
9627 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
9628 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
9629 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
9630 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9631 }
9632 case IEMMODE_64BIT:
9633 switch (pIemCpu->enmEffAddrMode)
9634 {
9635 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
9636 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
9637 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
9638 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9639 }
9640 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9641 }
9642 }
9643 IEMOP_MNEMONIC("scas rAX,Xv");
9644
9645 /*
9646 * Annoying double switch here.
9647 * Using ugly macro for implementing the cases, sharing it with scasb.
9648 */
9649 switch (pIemCpu->enmEffOpSize)
9650 {
9651 case IEMMODE_16BIT:
9652 switch (pIemCpu->enmEffAddrMode)
9653 {
9654 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
9655 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
9656 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
9657 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9658 }
9659 break;
9660
9661 case IEMMODE_32BIT:
9662 switch (pIemCpu->enmEffAddrMode)
9663 {
9664 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
9665 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
9666 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
9667 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9668 }
9669 break;
9670
9671 case IEMMODE_64BIT:
9672 switch (pIemCpu->enmEffAddrMode)
9673 {
9674 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
9675 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
9676 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
9677 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9678 }
9679 break;
9680 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9681 }
9682 return VINF_SUCCESS;
9683}
9684
9685#undef IEM_SCAS_CASE
9686
9687/**
9688 * Common 'mov r8, imm8' helper.
9689 */
9690FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
9691{
9692 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9693 IEMOP_HLP_NO_LOCK_PREFIX();
9694
9695 IEM_MC_BEGIN(0, 1);
9696 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
9697 IEM_MC_STORE_GREG_U8(iReg, u8Value);
9698 IEM_MC_ADVANCE_RIP();
9699 IEM_MC_END();
9700
9701 return VINF_SUCCESS;
9702}
9703
9704
/** Opcode 0xb0 - mov AL,Ib. */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX);
}
9711
9712
/** Opcode 0xb1 - mov CL,Ib. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX);
}
9719
9720
/** Opcode 0xb2 - mov DL,Ib. */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX);
}
9727
9728
/** Opcode 0xb3 - mov BL,Ib. */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX);
}
9735
9736
/** Opcode 0xb4 - mov AH,Ib (SPL with a REX prefix).  Register index 4 is
 *  deliberately passed as X86_GREG_xSP; the high-byte remapping is presumably
 *  done by the byte-register accessors. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP);
}
9743
9744
/** Opcode 0xb5 - mov CH,Ib (BPL with a REX prefix); see note on 0xb4. */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP);
}
9751
9752
/** Opcode 0xb6 - mov DH,Ib (SIL with a REX prefix); see note on 0xb4. */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI);
}
9759
9760
/** Opcode 0xb7 - mov BH,Ib (DIL with a REX prefix); see note on 0xb4. */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI);
}
9767
9768
9769/**
9770 * Common 'mov regX,immX' helper.
9771 */
9772FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
9773{
9774 switch (pIemCpu->enmEffOpSize)
9775 {
9776 case IEMMODE_16BIT:
9777 {
9778 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9779 IEMOP_HLP_NO_LOCK_PREFIX();
9780
9781 IEM_MC_BEGIN(0, 1);
9782 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
9783 IEM_MC_STORE_GREG_U16(iReg, u16Value);
9784 IEM_MC_ADVANCE_RIP();
9785 IEM_MC_END();
9786 break;
9787 }
9788
9789 case IEMMODE_32BIT:
9790 {
9791 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9792 IEMOP_HLP_NO_LOCK_PREFIX();
9793
9794 IEM_MC_BEGIN(0, 1);
9795 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
9796 IEM_MC_STORE_GREG_U32(iReg, u32Value);
9797 IEM_MC_ADVANCE_RIP();
9798 IEM_MC_END();
9799 break;
9800 }
9801 case IEMMODE_64BIT:
9802 {
9803 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm);
9804 IEMOP_HLP_NO_LOCK_PREFIX();
9805
9806 IEM_MC_BEGIN(0, 1);
9807 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
9808 IEM_MC_STORE_GREG_U64(iReg, u64Value);
9809 IEM_MC_ADVANCE_RIP();
9810 IEM_MC_END();
9811 break;
9812 }
9813 }
9814
9815 return VINF_SUCCESS;
9816}
9817
9818
9819/** Opcode 0xb8. */
9820FNIEMOP_DEF(iemOp_eAX_Iv)
9821{
9822 IEMOP_MNEMONIC("mov rAX,IV");
9823 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX);
9824}
9825
9826
9827/** Opcode 0xb9. */
9828FNIEMOP_DEF(iemOp_eCX_Iv)
9829{
9830 IEMOP_MNEMONIC("mov rCX,IV");
9831 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX);
9832}
9833
9834
9835/** Opcode 0xba. */
9836FNIEMOP_DEF(iemOp_eDX_Iv)
9837{
9838 IEMOP_MNEMONIC("mov rDX,IV");
9839 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX);
9840}
9841
9842
9843/** Opcode 0xbb. */
9844FNIEMOP_DEF(iemOp_eBX_Iv)
9845{
9846 IEMOP_MNEMONIC("mov rBX,IV");
9847 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX);
9848}
9849
9850
9851/** Opcode 0xbc. */
9852FNIEMOP_DEF(iemOp_eSP_Iv)
9853{
9854 IEMOP_MNEMONIC("mov rSP,IV");
9855 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP);
9856}
9857
9858
9859/** Opcode 0xbd. */
9860FNIEMOP_DEF(iemOp_eBP_Iv)
9861{
9862 IEMOP_MNEMONIC("mov rBP,IV");
9863 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP);
9864}
9865
9866
9867/** Opcode 0xbe. */
9868FNIEMOP_DEF(iemOp_eSI_Iv)
9869{
9870 IEMOP_MNEMONIC("mov rSI,IV");
9871 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI);
9872}
9873
9874
9875/** Opcode 0xbf. */
9876FNIEMOP_DEF(iemOp_eDI_Iv)
9877{
9878 IEMOP_MNEMONIC("mov rDI,IV");
9879 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI);
9880}
9881
9882
/** Opcode 0xc0 - Group 2 Eb,Ib: rotate/shift a byte r/m by an imm8 count.
 *  The ModR/M reg field selects the operation; /6 is not assigned. */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX(); /* NOTE(review): /6 is undefined; raised via the invalid-lock path here — confirm this matches the intended #UD behaviour. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are architecturally undefined for multi-bit shift/rotate counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory - note the imm8 count is fetched AFTER the effective address
           calculation, since the ModR/M displacement bytes come first. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9941
9942
/** Opcode 0xc1 - Group 2 Ev,Ib: rotate/shift a word/dword/qword r/m by an
 *  imm8 count.  Same operation selection via ModR/M reg as 0xc0; /6 is not
 *  assigned. */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX(); /* NOTE(review): /6 is undefined; raised via the invalid-lock path — same as the Eb,Ib variant. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are architecturally undefined for multi-bit shift/rotate counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory - the imm8 count is fetched AFTER the effective address
           calculation (ModR/M displacement bytes precede the immediate). */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10078
10079
/** Opcode 0xc2 - retn Iw: near return, then pop Iw extra bytes off the
 *  stack.  Operand size defaults to 64-bit in long mode. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
10089
10090
/** Opcode 0xc3 - retn: near return (no stack adjustment; shares the CImpl
 *  with 0xc2, passing 0 as the pop count). */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
10099
10100
/** Opcode 0xc4 - les Gv,Mp: load far pointer into ES:Gv. */
FNIEMOP_DEF(iemOp_les_Gv_Mp)
{
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_ES);
}
10107
10108
/** Opcode 0xc5 - lds Gv,Mp: load far pointer into DS:Gv. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp)
{
    IEMOP_MNEMONIC("lds Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_DS);
}
10115
10116
/** Opcode 0xc6 - Group 11 Eb,Ib: only /0 (mov Eb,Ib) is defined. */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access - the immediate is fetched AFTER the effective
           address calculation (displacement bytes precede it). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10148
10149
/** Opcode 0xc7 - Group 11: only /0 (mov Ev,Iz) is a valid encoding. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz (/0) is valid in this group (comment previously said Eb,Ib - copy/paste from 0xc6). */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                /* NOTE(review): GET_NEXT_U64 here presumably sign-extends the imm32 to
                   64 bits (mov r64,imm32 takes no imm64) - confirm against the fetch macro. */
                IEM_MC_BEGIN(0, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. The effective address is calculated before the immediate
           is fetched, as the immediate follows the ModRM/SIB/disp bytes. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10230
10231
10232
10233
/** Opcode 0xc8. ENTER Iw,Ib - not implemented yet; decode stub. */
FNIEMOP_STUB(iemOp_enter_Iw_Ib);
10236
10237
10238/** Opcode 0xc9. */
10239FNIEMOP_DEF(iemOp_leave)
10240{
10241 IEMOP_MNEMONIC("retn");
10242 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10243 IEMOP_HLP_NO_LOCK_PREFIX();
10244 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
10245}
10246
10247
/** Opcode 0xca. Far return, releasing Iw extra bytes from the stack. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); /* imm16: bytes to pop after the far return. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* operand size defaults to 64-bit in long mode per this implementation */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
10257
10258
/** Opcode 0xcb. Far return without releasing extra stack bytes (pop count 0). */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* operand size defaults to 64-bit in long mode per this implementation */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
10267
10268
10269/** Opcode 0xcc. */
10270FNIEMOP_DEF(iemOp_int_3)
10271{
10272 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
10273}
10274
10275
/** Opcode 0xcd. INT imm8 - software interrupt with the vector from imm8. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int); /* interrupt vector */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
10282
10283
10284/** Opcode 0xce. */
10285FNIEMOP_DEF(iemOp_into)
10286{
10287 IEM_MC_BEGIN(2, 0);
10288 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
10289 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
10290 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
10291 IEM_MC_END();
10292 return VINF_SUCCESS;
10293}
10294
10295
/** Opcode 0xcf. IRET - interrupt return; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
10303
10304
10305/** Opcode 0xd0. */
10306FNIEMOP_DEF(iemOp_Grp2_Eb_1)
10307{
10308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10309 PCIEMOPSHIFTSIZES pImpl;
10310 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10311 {
10312 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
10313 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
10314 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
10315 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
10316 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
10317 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
10318 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
10319 case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
10320 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
10321 }
10322 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
10323
10324 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10325 {
10326 /* register */
10327 IEMOP_HLP_NO_LOCK_PREFIX();
10328 IEM_MC_BEGIN(3, 0);
10329 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10330 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
10331 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10332 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10333 IEM_MC_REF_EFLAGS(pEFlags);
10334 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
10335 IEM_MC_ADVANCE_RIP();
10336 IEM_MC_END();
10337 }
10338 else
10339 {
10340 /* memory */
10341 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10342 IEM_MC_BEGIN(3, 2);
10343 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10344 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
10345 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10347
10348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
10349 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
10350 IEM_MC_FETCH_EFLAGS(EFlags);
10351 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
10352
10353 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10354 IEM_MC_COMMIT_EFLAGS(EFlags);
10355 IEM_MC_ADVANCE_RIP();
10356 IEM_MC_END();
10357 }
10358 return VINF_SUCCESS;
10359}
10360
10361
10362
10363/** Opcode 0xd1. */
10364FNIEMOP_DEF(iemOp_Grp2_Ev_1)
10365{
10366 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10367 PCIEMOPSHIFTSIZES pImpl;
10368 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10369 {
10370 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
10371 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
10372 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
10373 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
10374 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
10375 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
10376 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
10377 case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
10378 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
10379 }
10380 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
10381
10382 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10383 {
10384 /* register */
10385 IEMOP_HLP_NO_LOCK_PREFIX();
10386 switch (pIemCpu->enmEffOpSize)
10387 {
10388 case IEMMODE_16BIT:
10389 IEM_MC_BEGIN(3, 0);
10390 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10391 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
10392 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10393 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10394 IEM_MC_REF_EFLAGS(pEFlags);
10395 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
10396 IEM_MC_ADVANCE_RIP();
10397 IEM_MC_END();
10398 return VINF_SUCCESS;
10399
10400 case IEMMODE_32BIT:
10401 IEM_MC_BEGIN(3, 0);
10402 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10403 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
10404 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10405 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10406 IEM_MC_REF_EFLAGS(pEFlags);
10407 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
10408 IEM_MC_ADVANCE_RIP();
10409 IEM_MC_END();
10410 return VINF_SUCCESS;
10411
10412 case IEMMODE_64BIT:
10413 IEM_MC_BEGIN(3, 0);
10414 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10415 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
10416 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10417 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10418 IEM_MC_REF_EFLAGS(pEFlags);
10419 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
10420 IEM_MC_ADVANCE_RIP();
10421 IEM_MC_END();
10422 return VINF_SUCCESS;
10423
10424 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10425 }
10426 }
10427 else
10428 {
10429 /* memory */
10430 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10431 switch (pIemCpu->enmEffOpSize)
10432 {
10433 case IEMMODE_16BIT:
10434 IEM_MC_BEGIN(3, 2);
10435 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10436 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
10437 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10439
10440 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
10441 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
10442 IEM_MC_FETCH_EFLAGS(EFlags);
10443 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
10444
10445 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10446 IEM_MC_COMMIT_EFLAGS(EFlags);
10447 IEM_MC_ADVANCE_RIP();
10448 IEM_MC_END();
10449 return VINF_SUCCESS;
10450
10451 case IEMMODE_32BIT:
10452 IEM_MC_BEGIN(3, 2);
10453 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10454 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
10455 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10456 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10457
10458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
10459 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
10460 IEM_MC_FETCH_EFLAGS(EFlags);
10461 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
10462
10463 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10464 IEM_MC_COMMIT_EFLAGS(EFlags);
10465 IEM_MC_ADVANCE_RIP();
10466 IEM_MC_END();
10467 return VINF_SUCCESS;
10468
10469 case IEMMODE_64BIT:
10470 IEM_MC_BEGIN(3, 2);
10471 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10472 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
10473 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10475
10476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
10477 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
10478 IEM_MC_FETCH_EFLAGS(EFlags);
10479 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
10480
10481 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10482 IEM_MC_COMMIT_EFLAGS(EFlags);
10483 IEM_MC_ADVANCE_RIP();
10484 IEM_MC_END();
10485 return VINF_SUCCESS;
10486
10487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10488 }
10489 }
10490}
10491
10492
/** Opcode 0xd2. Group 2: rotate/shift Eb by the count in CL. */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is a reserved encoding */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are left undefined by these forms in the verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map read-write, shift in place, commit result and flags. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10550
10551
/** Opcode 0xd3. Group 2: rotate/shift Ev by the count in CL. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is a reserved encoding */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by these forms in the verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register operand, one case per effective operand size */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory operand: map read-write, shift in place, commit result+flags. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10686
/** Opcode 0xd4. AAM imm8 - ASCII adjust AX after multiply; imm8 is the divisor. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* invalid in 64-bit mode */
    if (!bImm) /* a zero divisor raises #DE before deferring to the C implementation */
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
10698
10699
/** Opcode 0xd5. AAD imm8 - ASCII adjust AX before division; imm8 is the base
 *  (multiplies, so no divide-by-zero check is needed, unlike AAM). */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* invalid in 64-bit mode */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
10709
10710
/** Opcode 0xd7. XLAT - AL = [xBX + zero-extended AL], per effective address size.
 *  NOTE(review): 0xd6 (SALC) has no handler in this chunk - presumably handled
 *  elsewhere or left invalid; verify against the one-byte opcode map. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* NOTE(review): IEM_MC_BEGIN(2, 0) is followed by two IEM_MC_LOCALs and no
       IEM_MC_ARGs - the (cArgs, cLocals) counts look swapped; harmless only if
       the counts are advisory. TODO confirm against the IEM_MC_BEGIN contract. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* AL zero-extended to the address width */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp); /* result goes back into AL */
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10757
10758
10759/**
10760 * Common worker for FPU instructions working on ST0 and STn, and storing the
10761 * result in ST0.
10762 *
10763 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10764 */
10765FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10766{
10767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10768
10769 IEM_MC_BEGIN(3, 1);
10770 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10771 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10772 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10773 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10774
10775 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10776 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10777 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
10778 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10779 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
10780 IEM_MC_ELSE()
10781 IEM_MC_FPU_STACK_UNDERFLOW(0);
10782 IEM_MC_ENDIF();
10783 IEM_MC_ADVANCE_RIP();
10784
10785 IEM_MC_END();
10786 return VINF_SUCCESS;
10787}
10788
10789
10790/**
10791 * Common worker for FPU instructions working on ST0 and STn, and only affecting
10792 * flags.
10793 *
10794 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10795 */
10796FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10797{
10798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10799
10800 IEM_MC_BEGIN(3, 1);
10801 IEM_MC_LOCAL(uint16_t, u16Fsw);
10802 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10803 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10804 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10805
10806 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10807 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10808 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
10809 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10810 IEM_MC_UPDATE_FSW(u16Fsw);
10811 IEM_MC_ELSE()
10812 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
10813 IEM_MC_ENDIF();
10814 IEM_MC_ADVANCE_RIP();
10815
10816 IEM_MC_END();
10817 return VINF_SUCCESS;
10818}
10819
10820
10821/**
10822 * Common worker for FPU instructions working on ST0 and STn, only affecting
10823 * flags, and popping when done.
10824 *
10825 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10826 */
10827FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10828{
10829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10830
10831 IEM_MC_BEGIN(3, 1);
10832 IEM_MC_LOCAL(uint16_t, u16Fsw);
10833 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10834 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10835 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10836
10837 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10838 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10839 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
10840 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10841 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
10842 IEM_MC_ELSE()
10843 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
10844 IEM_MC_ENDIF();
10845 IEM_MC_ADVANCE_RIP();
10846
10847 IEM_MC_END();
10848 return VINF_SUCCESS;
10849}
10850
10851
/** Opcode 0xd8 11/0. fadd st0,stN - ST0 += STn, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
10858
10859
/** Opcode 0xd8 11/1. fmul st0,stN - ST0 *= STn, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
10866
10867
/** Opcode 0xd8 11/2. fcom st0,stN - compare ST0 with STn, updating FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
10874
10875
/** Opcode 0xd8 11/3. fcomp st0,stN - compare ST0 with STn, then pop the stack.
 *  Same assembly worker as fcom; the pop is handled by the helper. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
10882
10883
/** Opcode 0xd8 11/4. fsub st0,stN - ST0 -= STn, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
10890
10891
/** Opcode 0xd8 11/5. fsubr st0,stN - reversed subtract, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
10898
10899
/** Opcode 0xd8 11/6. fdiv st0,stN - ST0 /= STn, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
10906
10907
/** Opcode 0xd8 11/7. fdivr st0,stN - reversed divide, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
10914
10915
10916/**
10917 * Common worker for FPU instructions working on ST0 and an m32r, and storing
10918 * the result in ST0.
10919 *
10920 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10921 */
10922FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
10923{
10924 IEM_MC_BEGIN(3, 3);
10925 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10926 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10927 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
10928 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10929 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10930 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
10931
10932 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
10933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10934
10935 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10936 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10937 IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
10938
10939 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
10940 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
10941 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
10942 IEM_MC_ELSE()
10943 IEM_MC_FPU_STACK_UNDERFLOW(0);
10944 IEM_MC_ENDIF();
10945 IEM_MC_ADVANCE_RIP();
10946
10947 IEM_MC_END();
10948 return VINF_SUCCESS;
10949}
10950
10951
/** Opcode 0xd8 !11/0. fadd st0,m32r - ST0 += 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
10958
10959
/** Opcode 0xd8 !11/1. fmul st0,m32r - ST0 *= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
10966
10967
/** Opcode 0xd8 !11/2. fcom st0,m32r - compare ST0 with a 32-bit real from
 *  memory, updating FSW only (open-coded since the memory operand address is
 *  also recorded via the *_MEM_OP update macros). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc); /* no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10999
11000
/** Opcode 0xd8 !11/3. fcomp st0,m32r - like fcom st0,m32r but pops the FPU
 *  stack afterwards (note the *_THEN_POP update macros on both paths). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc); /* no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11032
11033
/** Opcode 0xd8 !11/4. fsub st0,m32r - ST0 -= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
11040
11041
/** Opcode 0xd8 !11/5. fsubr st0,m32r - reversed subtract with a 32-bit real. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
11048
11049
/** Opcode 0xd8 !11/6. fdiv st0,m32r - ST0 /= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
11056
11057
/** Opcode 0xd8 !11/7. fdivr st0,m32r - reversed divide with a 32-bit real. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
11064
11065
/** Opcode 0xd8. First x87 escape byte; dispatches on mod and reg of the
 *  following ModRM byte (register forms vs m32r memory forms). */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Record the opcode-buffer offset of the 0xd8 byte itself (hence the -1)
       for the FPU's last-opcode (FOP) bookkeeping. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* mod=3: register forms, ST0 op STn */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* mod!=3: memory forms, ST0 op m32real */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11103
11104
/** Opcode 0xd9 /0 mem32real
 * Load a 32-bit real from memory, convert to 80-bit, and push onto the stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push requires ST7 to be free; otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val)
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11136
11137
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real: stores ST(0) to memory as a 32-bit float without popping. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so access faults precede FPU state changes. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, write the negative QNaN indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11171
11172
/** Opcode 0xd9 !11/3
 * FSTP m32real: stores ST(0) to memory as a 32-bit float and pops the stack.
 * Identical to iemOp_fst_m32r except for the _THEN_POP FSW/underflow variants. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, write the negative QNaN indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11206
11207
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte: loads the FPU environment; the heavy lifting is done by
 * the iemCImpl_fldenv worker (layout depends on the effective operand size). */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pIemCpu->iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
11223
11224
11225/** Opcode 0xd9 !11/5 */
11226FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
11227{
11228 IEMOP_MNEMONIC("fldcw m2byte");
11229 IEM_MC_BEGIN(1, 1);
11230 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11231 IEM_MC_ARG(uint16_t, u16Fsw, 0);
11232 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
11233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11234 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11235 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
11236 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
11237 IEM_MC_END();
11238 return VINF_SUCCESS;
11239}
11240
11241
11242/** Opcode 0xd9 !11/6 */
11243FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
11244{
11245 IEMOP_MNEMONIC("fstenv m14/28byte");
11246 IEM_MC_BEGIN(3, 0);
11247 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
11248 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pIemCpu->iEffSeg, 1);
11249 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
11250 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
11251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11252 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11253 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
11254 IEM_MC_END();
11255 return VINF_SUCCESS;
11256}
11257
11258
11259/** Opcode 0xd9 !11/7 */
11260FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
11261{
11262 IEMOP_MNEMONIC("fnstcw m2byte");
11263 IEM_MC_BEGIN(2, 0);
11264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11265 IEM_MC_LOCAL(uint16_t, u16Fcw);
11266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
11267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11268 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11269 IEM_MC_FETCH_FSW(u16Fcw);
11270 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
11271 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
11272 IEM_MC_END();
11273 return VINF_SUCCESS;
11274}
11275
11276
/** Opcode 0xd9 0xd0 - FNOP.  (The earlier "0xd9 0xc9" in this comment was
 *  wrong: D9 C9 decodes as FXCH ST(1) via reg=1; FNOP is encoded D9 D0.)
 *  Does nothing except the usual FPU instruction bookkeeping. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
11293
11294
/** Opcode 0xd9 11/0 stN
 * FLD ST(i): pushes a copy of register ST(i) onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Source register is taken from the r/m bits; the push itself underflows
       if that register is empty. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
11319
11320
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i): exchanges ST(0) with ST(i).  The empty-register case is handed
 * off to a C-impl worker. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Both registers valid: ST(i) gets the old ST(0), ST(0) gets the old ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
11348
11349
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i): copies ST(0) into ST(i) and pops the stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence,
       so the self-store case only updates FSW and pops. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11390
11391
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators (FCHS, FABS, FSQRT, ...).
 *
 * The register stack is not pushed or popped; an empty ST0 signals underflow.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11420
11421
/** Opcode 0xd9 0xe0 - FCHS: complements the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
11428
11429
/** Opcode 0xd9 0xe1 - FABS: clears the sign of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
11436
11437
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW,
 * i.e. flag-only unary operators (FTST, FXAM).
 *
 * No register is modified; only the status word is updated.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX = no destination register for the underflow handling. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11465
11466
/** Opcode 0xd9 0xe4 - FTST: compares ST(0) against +0.0, setting only FSW flags. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
11473
11474
/** Opcode 0xd9 0xe5 - FXAM: classifies the value in ST(0) via the FSW condition codes. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
11481
11482
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack
 * (FLD1, FLDL2T, FLDL2E, FLDPI, FLDLG2, FLDLN2, FLDZ).
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* The push target is the register that becomes ST(0), i.e. current ST(7). */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11509
11510
/** Opcode 0xd9 0xe8 - FLD1: pushes +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
11517
11518
/** Opcode 0xd9 0xe9 - FLDL2T: pushes log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
11525
11526
/** Opcode 0xd9 0xea - FLDL2E: pushes log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
11533
/** Opcode 0xd9 0xeb - FLDPI: pushes pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
11540
11541
/** Opcode 0xd9 0xec - FLDLG2: pushes log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
11548
/** Opcode 0xd9 0xed - FLDLN2: pushes ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
11555
11556
/** Opcode 0xd9 0xee - FLDZ: pushes +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
11563
11564
/** Opcode 0xd9 0xf0 - F2XM1: replaces ST(0) with 2^ST(0) - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
11571
11572
11573/** Opcode 0xd9 0xf1. */
11574FNIEMOP_DEF(iemOp_fylx2)
11575{
11576 IEMOP_MNEMONIC("fylx2 st0");
11577 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
11578}
11579
11580
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack (FPTAN, FXTRACT, FSINCOS).
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11609
11610
/** Opcode 0xd9 0xf2 - FPTAN: replaces ST(0) with its partial tangent and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
11617
11618
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised (FPATAN,
 * FYL2XP1 call it with bRm = 1, i.e. STn = ST1).
 *
 * @param bRm Supplies the register number in its r/m bits.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Operand 1 is ST(n) (destination), operand 2 is ST(0); pop afterwards. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11649
11650
/** Opcode 0xd9 0xf3 - FPATAN: ST(1) receives the partial arctangent result, then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
11657
11658
/** Opcode 0xd9 0xf4 - FXTRACT: splits ST(0) into exponent and significand (two results). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
11665
11666
/** Opcode 0xd9 0xf5 - FPREM1: IEEE partial remainder of ST(0) by ST(1), stored in ST(0). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
11673
11674
/** Opcode 0xd9 0xf6 - FDECSTP: decrements the FPU top-of-stack pointer (no data moved). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_END();
    return VINF_SUCCESS;
}
11695
11696
/** Opcode 0xd9 0xf7 - FINCSTP: increments the FPU top-of-stack pointer (no data moved). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_END();
    return VINF_SUCCESS;
}
11717
11718
/** Opcode 0xd9 0xf8 - FPREM: partial remainder (truncating) of ST(0) by ST(1), stored in ST(0). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
11725
11726
/** Opcode 0xd9 0xf9 - FYL2XP1: result stored in ST(1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
11733
11734
/** Opcode 0xd9 0xfa - FSQRT: replaces ST(0) with its square root. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
11741
11742
/** Opcode 0xd9 0xfb - FSINCOS: replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
11749
11750
/** Opcode 0xd9 0xfc - FRNDINT: rounds ST(0) to an integer per the current rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
11757
11758
/** Opcode 0xd9 0xfd - FSCALE: scales ST(0) by ST(1), result in ST(0). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
11765
11766
/** Opcode 0xd9 0xfe - FSIN: replaces ST(0) with its sine. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
11773
11774
/** Opcode 0xd9 0xff - FCOS: replaces ST(0) with its cosine. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
11781
11782
/** Used by iemOp_EscF1.
 * Dispatch table covering the 32 register-form encodings D9 E0..D9 FF, one
 * entry per opcode byte; invalid encodings map to iemOp_Invalid. */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fylx2, /* FYL2X; the handler name carries a historical typo. */
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
11819
11820
11821/** Opcode 0xd9. */
11822FNIEMOP_DEF(iemOp_EscF1)
11823{
11824 pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
11825 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11826 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11827 {
11828 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11829 {
11830 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
11831 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
11832 case 2:
11833 if (bRm == 0xc9)
11834 return FNIEMOP_CALL(iemOp_fnop);
11835 return IEMOP_RAISE_INVALID_OPCODE();
11836 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
11837 case 4:
11838 case 5:
11839 case 6:
11840 case 7:
11841 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[(bRm & (X86_MODRM_REG_MASK |X86_MODRM_RM_MASK)) - 0xe0]);
11842 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11843 }
11844 }
11845 else
11846 {
11847 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11848 {
11849 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
11850 case 1: return IEMOP_RAISE_INVALID_OPCODE();
11851 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
11852 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
11853 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
11854 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
11855 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
11856 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
11857 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11858 }
11859 }
11860}
11861
11862
/** Opcode 0xda 11/0
 * FCMOVB ST(0),ST(i): copies ST(i) to ST(0) if CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST(0) must be valid, whether or not the move is taken. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11888
11889
/** Opcode 0xda 11/1
 * FCMOVE ST(0),ST(i): copies ST(i) to ST(0) if ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11915
11916
/** Opcode 0xda 11/2
 * FCMOVBE ST(0),ST(i): copies ST(i) to ST(0) if CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11942
11943
/** Opcode 0xda 11/3
 * FCMOVU ST(0),ST(i): copies ST(i) to ST(0) if PF is set (the "unordered"
 * condition). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11969
11970
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done (FUCOMPP; compares ST0 against ST1).
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Operands are fixed: ST(0) and ST(1); both are popped afterwards. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12000
12001
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare of ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
12008
12009
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0 (FIADD, FIMUL, FISUB, FISUBR, FIDIV, FIDIVR with a
 * 32-bit signed integer memory operand).
 *
 * @param bRm The ModR/M byte, used to compute the effective address.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the integer operand before touching FPU state so memory faults come first. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12044
12045
/** Opcode 0xda !11/0 - FIADD m32int: ST(0) += (m32 integer). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
12052
12053
/** Opcode 0xda !11/1 - FIMUL m32int: ST(0) *= (m32 integer). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
12060
12061
/** Opcode 0xda !11/2
 * FICOM m32int: compares ST(0) with a 32-bit signed integer from memory,
 * updating only the status word. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12093
12094
/** Opcode 0xda !11/3
 * FICOMP m32int: like FICOM m32int but pops ST(0) afterwards (note the
 * _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12126
12127
/** Opcode 0xda !11/4 - FISUB m32int: ST(0) -= (m32 integer). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
12134
12135
/** Opcode 0xda !11/5 - FISUBR m32int: ST(0) = (m32 integer) - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
12142
12143
/** Opcode 0xda !11/6 - FIDIV m32int: ST(0) /= (m32 integer). */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
12150
12151
/** Opcode 0xda !11/7.
 * FIDIVR - reverse divide: ST(0) = m32i / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
12158
12159
/** Opcode 0xda.
 * FPU escape 2 dispatcher: register forms (mod==3) are the FCMOVcc family and
 * FUCOMPP; memory forms are the 32-bit integer arithmetic instructions. */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Remember the opcode byte offset for FOP/FPUIP updating. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only 0xda 0xe9 (FUCOMPP) is defined in this group. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12199
12200
/** Opcode 0xdb !11/0.
 * FILD m32i - load a 32-bit signed integer from memory, convert it to an
 * 80-bit real and push it onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push requires the register that becomes the new top, ST(7) relative
       to the current top, to be empty; otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12231
12232
/** Opcode 0xdb !11/1.
 * FISTTP m32i - store ST(0) to memory as a 32-bit signed integer using
 * truncation (chop) rounding regardless of FCW.RC, then pop the stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with IM masked, store the integer-indefinite value;
           either way raise/record stack underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12266
12267
/** Opcode 0xdb !11/2.
 * FIST m32i - store ST(0) to memory as a 32-bit signed integer using the
 * rounding mode from FCW.RC; the stack is NOT popped. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12301
12302
12303/** Opcode 0xdb !11/3. */
12304FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
12305{
12306 IEMOP_MNEMONIC("fisttp m32i");
12307 IEM_MC_BEGIN(3, 2);
12308 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12309 IEM_MC_LOCAL(uint16_t, u16Fsw);
12310 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12311 IEM_MC_ARG(int32_t *, pi32Dst, 1);
12312 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12313
12314 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
12315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12316 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12317 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12318
12319 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
12320 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
12321 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
12322 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
12323 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
12324 IEM_MC_ELSE()
12325 IEM_MC_IF_FCW_IM()
12326 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
12327 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
12328 IEM_MC_ENDIF();
12329 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
12330 IEM_MC_ENDIF();
12331 IEM_MC_ADVANCE_RIP();
12332
12333 IEM_MC_END();
12334 return VINF_SUCCESS;
12335}
12336
12337
/** Opcode 0xdb !11/5.
 * FLD m80r - load an 80-bit extended real from memory and push it onto the
 * FPU register stack (no conversion needed). */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Push requires ST(7) to be empty, otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12368
12369
/** Opcode 0xdb !11/7.
 * FSTP m80r - store ST(0) to memory as an 80-bit extended real, then pop
 * the register stack. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with IM masked, store negative QNaN (real indefinite);
           either way record stack underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12403
12404
/** Opcode 0xdb 11/0.
 * FCMOVNB - copy ST(i) to ST(0) if CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be valid; only the ST(i) reference is needed. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12430
12431
/** Opcode 0xdb 11/1.
 * FCMOVNE - copy ST(i) to ST(0) if ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12457
12458
/** Opcode 0xdb 11/2.
 * FCMOVNBE - copy ST(i) to ST(0) if both CF and ZF are clear (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12484
12485
/** Opcode 0xdb 11/3.
 * FCMOVNU - copy ST(i) to ST(0) if PF is clear (not unordered). */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12511
12512
/** Opcode 0xdb 0xe0.
 * FNENI - enable interrupts; 8087 only, treated as a no-op on later FPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12524
12525
/** Opcode 0xdb 0xe1.
 * FNDISI - disable interrupts; 8087 only, treated as a no-op on later FPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12537
12538
/** Opcode 0xdb 0xe2.
 * FNCLEX - clear the FPU exception flags in FSW without checking for
 * pending unmasked exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12552
12553
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitialize the FPU without checking for pending exceptions
 * (hence fCheckXcpts = false); deferred to the C implementation. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
12561
12562
/** Opcode 0xdb 0xe4.
 * FNSETPM - set protected mode on the FPU; 80287 only, no-op on later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12574
12575
/** Opcode 0xdb 0xe5.
 * FRSTPM - reset protected mode; 80287XL only. Raises \#UD here since newer
 * CPUs treat the encoding as invalid (the no-op variant is compiled out). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
12591
12592
/** Opcode 0xdb 11/5.
 * FUCOMI - unordered compare ST(0) with ST(i), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
12599
12600
/** Opcode 0xdb 11/6.
 * FCOMI - ordered compare ST(0) with ST(i), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
12607
12608
12609/** Opcode 0xdb. */
12610FNIEMOP_DEF(iemOp_EscF3)
12611{
12612 pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
12613 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12614 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12615 {
12616 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12617 {
12618 case 0: FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
12619 case 1: FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
12620 case 2: FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
12621 case 3: FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
12622 case 4:
12623 switch (bRm)
12624 {
12625 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
12626 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
12627 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
12628 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
12629 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
12630 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
12631 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
12632 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
12633 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12634 }
12635 break;
12636 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
12637 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
12638 case 7: return IEMOP_RAISE_INVALID_OPCODE();
12639 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12640 }
12641 }
12642 else
12643 {
12644 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12645 {
12646 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
12647 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
12648 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
12649 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
12650 case 4: return IEMOP_RAISE_INVALID_OPCODE();
12651 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
12652 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12653 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
12654 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12655 }
12656 }
12657}
12658
12659
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param bRm The ModR/M byte; the r/m bits select ST(i).
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Operate only if both ST(i) and ST(0) are valid; otherwise record a
       stack underflow targeting ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12690
12691
/** Opcode 0xdc 11/0.
 * FADD - ST(i) = ST(i) + ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
12698
12699
/** Opcode 0xdc 11/1.
 * FMUL - ST(i) = ST(i) * ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
12706
12707
/** Opcode 0xdc 11/4.
 * FSUBR - ST(i) = ST(0) - ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
12714
12715
/** Opcode 0xdc 11/5.
 * FSUB - ST(i) = ST(i) - ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
12722
12723
/** Opcode 0xdc 11/6.
 * FDIVR - ST(i) = ST(0) / ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
12730
12731
/** Opcode 0xdc 11/7.
 * FDIV - ST(i) = ST(i) / ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
12738
12739
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param bRm The ModR/M byte; used for effective address calculation.
 * @param pfnImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* Operate only if ST(0) is valid; otherwise record a stack underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12773
12774
/** Opcode 0xdc !11/0.
 * FADD - ST(0) = ST(0) + m64r. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
12781
12782
/** Opcode 0xdc !11/1.
 * FMUL - ST(0) = ST(0) * m64r. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
12789
12790
/** Opcode 0xdc !11/2.
 * FCOM - compare ST(0) with a 64-bit real from memory, set C0-C3 in FSW. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Compare only if ST(0) is valid; otherwise record a stack underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12822
12823
/** Opcode 0xdc !11/3.
 * FCOMP - compare ST(0) with a 64-bit real from memory, set C0-C3 in FSW,
 * then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Compare only if ST(0) is valid; otherwise record a stack underflow.
       The stack is popped in both cases. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12855
12856
/** Opcode 0xdc !11/4.
 * FSUB - ST(0) = ST(0) - m64r. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
12863
12864
/** Opcode 0xdc !11/5.
 * FSUBR - ST(0) = m64r - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
12871
12872
/** Opcode 0xdc !11/6.
 * FDIV - ST(0) = ST(0) / m64r. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
12879
12880
/** Opcode 0xdc !11/7.
 * FDIVR - ST(0) = m64r / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
12887
12888
/** Opcode 0xdc.
 * FPU escape 4 dispatcher: register forms (mod==3) operate on ST(i),ST(0);
 * memory forms are the 64-bit real arithmetic instructions. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Remember the opcode byte offset for FOP/FPUIP updating. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12925
12926
/** Opcode 0xdd !11/0.
 * FLD m64r - load a 64-bit real from memory, convert it to an 80-bit real
 * and push it onto the FPU register stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* Push requires ST(7) to be empty, otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12957
12958
/** Opcode 0xdd !11/1.
 * FISTTP m64i - store ST(0) to memory as a 64-bit signed integer using
 * truncation (chop) rounding regardless of FCW.RC, then pop the stack.
 * (Comment previously said !11/0; this encoding is reg==1.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with IM masked, store the integer-indefinite value;
           either way record stack underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12992
12993
/** Opcode 0xdd !11/2.
 * FST m64r - store ST(0) to memory as a 64-bit real; no pop.
 * (Comment previously said !11/0; this encoding is reg==2.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with IM masked, store negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13027
13028
13029
13030
/** Opcode 0xdd !11/3.
 * FSTP m64r - store ST(0) to memory as a 64-bit real, then pop the stack.
 * (Comment previously said !11/0; this encoding is reg==3.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with IM masked, store negative QNaN (real indefinite);
           either way record stack underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13064
13065
/** Opcode 0xdd !11/4. FRSTOR - restore FPU state from memory; not implemented yet. */
FNIEMOP_STUB_1(iemOp_frstor, uint8_t, bRm);
13068
/** Opcode 0xdd !11/6. FNSAVE - save FPU state to memory and reinitialize; not implemented yet. */
FNIEMOP_STUB_1(iemOp_fnsave, uint8_t, bRm);
13071
13072
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to memory without checking for
 * pending unmasked exceptions first.
 * (Comment previously said !11/0; this encoding is reg==7.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13093
13094
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark the given FPU stack register as empty. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Free the register selected by the r/m field and record FOP/FPUIP. */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13115
13116
/** Opcode 0xdd 11/2.
 * (Header previously said 11/1; the iemOp_EscF5 dispatch routes reg=2 here.)
 * FST ST(i) - copy ST(0) into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13138
13139
13140/** Opcode 0xdd 11/3. */
13141FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
13142{
13143 IEMOP_MNEMONIC("fcom st0,stN");
13144 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
13145}
13146
13147
13148/** Opcode 0xdd 11/4. */
13149FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
13150{
13151 IEMOP_MNEMONIC("fcomp st0,stN");
13152 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
13153}
13154
13155
/** Opcode 0xdd.
 * x87 escape: dispatches on the mod and reg fields of the ModR/M byte.
 * Register forms (mod=3): FFREE, FXCH (reserved alias), FST, FSTP, FUCOM,
 * FUCOMP.  Memory forms: FLD m64r, FISTTP m64i, FST/FSTP m64r, FRSTOR,
 * FNSAVE, FNSTSW m16. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Remember where the FPU opcode starts for later FOP updating. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13192
13193
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - add, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
13200
13201
/** Opcode 0xde 11/1.
 * (Header previously said 11/0; the iemOp_EscF6 dispatch routes reg=1 here.)
 * FMULP ST(i),ST(0) - multiply, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
13208
13209
13210/** Opcode 0xde 0xd9. */
13211FNIEMOP_DEF(iemOp_fcompp)
13212{
13213 IEMOP_MNEMONIC("fucompp st0,stN");
13214 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
13215}
13216
13217
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reverse subtract, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
13224
13225
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtract, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
13232
13233
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reverse divide, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
13240
13241
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divide, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
13248
13249
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param bRm The ModR/M byte (memory form).
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 16-bit integer operand before touching the FPU stack. */
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13284
13285
/** Opcode 0xde !11/0.
 * FIADD m16i - add a 16-bit integer to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
13292
13293
/** Opcode 0xde !11/1.
 * FIMUL m16i - multiply ST(0) by a 16-bit integer. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
13300
13301
/** Opcode 0xde !11/2.
 * FICOM m16i - compare ST(0) with a 16-bit integer; only FSW is updated. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13333
13334
/** Opcode 0xde !11/3.
 * FICOMP m16i - like FICOM m16i, but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13366
13367
/** Opcode 0xde !11/4.
 * FISUB m16i - subtract a 16-bit integer from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
13374
13375
/** Opcode 0xde !11/5.
 * FISUBR m16i - reverse subtract: ST(0) = m16i - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
13382
13383
13384/** Opcode 0xde !11/6. */
13385FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
13386{
13387 IEMOP_MNEMONIC("fiadd m16i");
13388 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
13389}
13390
13391
13392/** Opcode 0xde !11/7. */
13393FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
13394{
13395 IEMOP_MNEMONIC("fiadd m16i");
13396 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
13397}
13398
13399
/** Opcode 0xde.
 * x87 escape: register forms are the popping arithmetic instructions
 * (FADDP, FMULP, FCOMPP, FSUBRP, FSUBP, FDIVRP, FDIVP); memory forms are
 * the 16-bit integer arithmetic instructions (FIADD..FIDIVR m16i). */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Remember where the FPU opcode starts for later FOP updating. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            /* NOTE(review): DE /2 register form is reserved; this treats it
               as FCOMP ST(i) -- confirm against the intended CPU behavior. */
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13438
13439
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Free the selected register, then pop by incrementing TOP. */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13460
13461
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - store the FPU status word in AX without checking for pending
 * FPU exceptions first. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13477
13478
13479/** Opcode 0xdf 11/5. */
13480FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
13481{
13482 IEMOP_MNEMONIC("fcomip st0,stN");
13483 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
13484}
13485
13486
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i) - ordered compare into EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
13493
13494
/** Opcode 0xdf !11/0. FILD m16i - not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_fild_m16i, uint8_t, bRm);
13497
13498
/** Opcode 0xdf !11/1.
 * FISTTP m16i - store ST(0) as a truncated 16-bit integer and pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13532
13533
13534/** Opcode 0xdf !11/2. */
13535FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
13536{
13537 IEMOP_MNEMONIC("fistp m16i");
13538 IEM_MC_BEGIN(3, 2);
13539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13540 IEM_MC_LOCAL(uint16_t, u16Fsw);
13541 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13542 IEM_MC_ARG(int16_t *, pi16Dst, 1);
13543 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13544
13545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
13546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13547 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13548 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13549
13550 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
13551 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13552 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
13553 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
13554 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
13555 IEM_MC_ELSE()
13556 IEM_MC_IF_FCW_IM()
13557 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
13558 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
13559 IEM_MC_ENDIF();
13560 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
13561 IEM_MC_ENDIF();
13562 IEM_MC_ADVANCE_RIP();
13563
13564 IEM_MC_END();
13565 return VINF_SUCCESS;
13566}
13567
13568
/** Opcode 0xdf !11/3.
 * FISTP m16i - store ST(0) as a 16-bit integer, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13602
13603
/** Opcode 0xdf !11/4. FBLD m80bcd - not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);

/** Opcode 0xdf !11/5. FILD m64i - not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_fild_m64i, uint8_t, bRm);

/** Opcode 0xdf !11/6. FBSTP m80bcd - not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
13612
13613
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST(0) as a 64-bit integer, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13647
13648
13649/** Opcode 0xdf. */
13650FNIEMOP_DEF(iemOp_EscF7)
13651{
13652 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13653 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13654 {
13655 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13656 {
13657 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
13658 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
13659 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
13660 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
13661 case 4: if (bRm == 0xe0)
13662 return FNIEMOP_CALL(iemOp_fnstsw_ax);
13663 return IEMOP_RAISE_INVALID_OPCODE();
13664 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
13665 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
13666 case 7: return IEMOP_RAISE_INVALID_OPCODE();
13667 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13668 }
13669 }
13670 else
13671 {
13672 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13673 {
13674 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
13675 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
13676 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
13677 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
13678 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
13679 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
13680 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
13681 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
13682 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13683 }
13684 }
13685}
13686
13687
/** Opcode 0xe0.
 * LOOPNE/LOOPNZ Jb - decrement the counter register (CX/ECX/RCX, selected
 * by the effective address size) and branch while it is non-zero and ZF is
 * clear. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13734
13735
/** Opcode 0xe1.
 * LOOPE/LOOPZ Jb - decrement the counter register (CX/ECX/RCX, selected by
 * the effective address size) and branch while it is non-zero and ZF is
 * set. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13782
13783
/** Opcode 0xe2.
 * LOOP Jb - decrement the counter register (CX/ECX/RCX, selected by the
 * effective address size) and branch while it is non-zero. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13833
13834
/** Opcode 0xe3.
 * JCXZ/JECXZ/JRCXZ Jb - branch when the counter register (CX/ECX/RCX,
 * selected by the effective address size) is zero; the counter is not
 * modified. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13878
13879
13880/** Opcode 0xe4 */
13881FNIEMOP_DEF(iemOp_in_AL_Ib)
13882{
13883 IEMOP_MNEMONIC("in eAX,Ib");
13884 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13885 IEMOP_HLP_NO_LOCK_PREFIX();
13886 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
13887}
13888
13889
/** Opcode 0xe5.
 * IN eAX,Ib - read a word/dword (by operand size) from the immediate port. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
13898
13899
/** Opcode 0xe6.
 * OUT Ib,AL - write AL to the immediate port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
13908
13909
/** Opcode 0xe7.
 * OUT Ib,eAX - write AX/EAX (by operand size) to the immediate port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
13918
13919
/** Opcode 0xe8.
 * CALL Jv - near relative call; immediate width follows the effective
 * operand size (sign-extended to 64-bit in long mode). */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit mode uses a sign-extended 32-bit displacement. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13948
13949
/** Opcode 0xe9.
 * JMP Jv - near relative jump; 16-bit displacement for 16-bit operand
 * size, else a 32-bit displacement (also for 64-bit mode). */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13979
13980
/** Opcode 0xea.
 * JMP Ap - direct far jump with an immediate selector:offset operand;
 * invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
13997
13998
/** Opcode 0xeb.
 * JMP Jb - short relative jump with an 8-bit displacement. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
14012
14013
/** Opcode 0xec.
 * IN AL,DX - read one byte from the port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in  AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
14021
14022
/** Opcode 0xed.
 * IN eAX,DX - read a word/dword (by operand size) from the port in DX.
 * NOTE(review): the function name is missing the "in_" prefix used by its
 * siblings (iemOp_in_AL_DX); renaming would require touching the opcode
 * map, so it is left as-is here. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC("in  eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
14030
14031
/** Opcode 0xee.
 * OUT DX,AL - write AL to the port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
14039
14040
/** Opcode 0xef.
 * OUT DX,eAX - write AX/EAX (by operand size) to the port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
14048
14049
/** Opcode 0xf0.
 * LOCK prefix - record it and continue decoding the next opcode byte. */
FNIEMOP_DEF(iemOp_lock)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14058
14059
/** Opcode 0xf2.
 * REPNE/REPNZ prefix - record it (clearing any REPE) and continue decoding
 * the next opcode byte. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14070
14071
/** Opcode 0xf3.
 * REPE/REPZ prefix - record it (clearing any REPNE) and continue decoding
 * the next opcode byte. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14082
14083
14084/** Opcode 0xf4. */
14085FNIEMOP_DEF(iemOp_hlt)
14086{
14087 IEMOP_HLP_NO_LOCK_PREFIX();
14088 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
14089}
14090
14091
/** Opcode 0xf5.
 * CMC - complement the carry flag. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14103
14104
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Register operands call the normal (unlocked) 8-bit worker directly; memory
 * operands are mapped read/write and dispatched to the locked worker when a
 * LOCK prefix is present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (supplies pfnNormalU8 and
 *                  pfnLockedU8).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK prefix selects the atomic variant of the worker. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
14148
14149
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are forwarded to the shared GReg worker; memory operands
 * are handled here for each effective operand size, with LOCK selecting the
 * atomic worker variant.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (per-size normal/locked
 *                  workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14228
14229
/** Opcode 0xf6 /0 - 'test Eb,Ib'.
 *
 * Note the decode-order difference between the two paths: for registers the
 * immediate is fetched before the MC block, for memory it must be fetched
 * after the effective address (the immediate follows the ModRM displacement
 * in the instruction stream). */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        /* TEST only reads its destination, hence the read-only mapping. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
14277
14278
/** Opcode 0xf7 /0 - 'test Ev,Iv'.
 *
 * Same structure as the Eb variant, expanded per effective operand size.
 * In 64-bit mode the immediate is 32 bits sign-extended to 64
 * (IEM_OPCODE_GET_NEXT_S32_SX_U64), matching the instruction encoding. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* The immediate follows the ModRM bytes, so fetch it after
                   calculating the effective address. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14414
14415
/** Opcode 0xf6 /4, /5, /6 and /7 - mul/imul/div/idiv Eb.
 *
 * The 8-bit multiply/divide worker takes AX by reference (implicit source
 * and destination) and the 8-bit operand by value.
 *
 * NOTE(review): IEMOP_HLP_NO_LOCK_PREFIX() is invoked again in each branch
 * below - redundant with the call here, but harmless. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pfnU8, pu16AX, u8Value, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
14458
14459
/** Opcode 0xf7 /4, /5, /6 and /7 - mul/imul/div/idiv Ev.
 *
 * The per-size workers take xAX and xDX by reference (implicit operands) and
 * return non-zero on a divide error, in which case \#DE is raised instead of
 * advancing RIP. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                /* rc != 0 means the worker flagged a divide error. */
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14639
/** Opcode 0xf6 - group 3, byte operands. Dispatches on ModRM.reg:
 *  /0=test, /1=\#UD, /2=not, /3=neg, /4=mul, /5=imul, /6=div, /7=idiv. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14675
14676
/** Opcode 0xf7 - group 3, word/dword/qword operands. Dispatches on
 *  ModRM.reg: /0=test, /1=\#UD, /2=not, /3=neg, /4=mul, /5=imul, /6=div,
 *  /7=idiv. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14712
14713
/** Opcode 0xf8 - CLC (clear carry flag). */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14725
14726
/** Opcode 0xf9 - STC (set carry flag). */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14738
14739
/** Opcode 0xfa - CLI. Deferred to the C implementation (IOPL/VME checks
 *  happen there). */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
14747
14748
/** Opcode 0xfb - STI. Deferred to the C implementation (IOPL checks and the
 *  interrupt shadow are handled there). */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
14755
14756
/** Opcode 0xfc - CLD (clear direction flag). */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14768
14769
/** Opcode 0xfd - STD (set direction flag). */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14781
14782
14783/** Opcode 0xfe. */
14784FNIEMOP_DEF(iemOp_Grp4)
14785{
14786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14787 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14788 {
14789 case 0:
14790 IEMOP_MNEMONIC("inc Ev");
14791 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
14792 case 1:
14793 IEMOP_MNEMONIC("dec Ev");
14794 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
14795 default:
14796 IEMOP_MNEMONIC("grp4-ud");
14797 return IEMOP_RAISE_INVALID_OPCODE();
14798 }
14799}
14800
14801
/**
 * Opcode 0xff /2 - 'calln Ev' (near indirect call).
 *
 * The target is either read from a general register or fetched from memory,
 * then handed to the per-size call C implementation which pushes the return
 * address and updates RIP.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is read from memory. (Original comment was a
           copy-paste of the register case.) */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14883
14884typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
14885
14886FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
14887{
14888 /* Registers? How?? */
14889 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14890 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
14891
14892 /* Far pointer loaded from memory. */
14893 switch (pIemCpu->enmEffOpSize)
14894 {
14895 case IEMMODE_16BIT:
14896 IEM_MC_BEGIN(3, 1);
14897 IEM_MC_ARG(uint16_t, u16Sel, 0);
14898 IEM_MC_ARG(uint16_t, offSeg, 1);
14899 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
14900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14901 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
14902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14903 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
14904 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
14905 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
14906 IEM_MC_END();
14907 return VINF_SUCCESS;
14908
14909 case IEMMODE_32BIT:
14910 IEM_MC_BEGIN(3, 1);
14911 IEM_MC_ARG(uint16_t, u16Sel, 0);
14912 IEM_MC_ARG(uint32_t, offSeg, 1);
14913 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
14914 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
14916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14917 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
14918 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
14919 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
14920 IEM_MC_END();
14921 return VINF_SUCCESS;
14922
14923 case IEMMODE_64BIT:
14924 IEM_MC_BEGIN(3, 1);
14925 IEM_MC_ARG(uint16_t, u16Sel, 0);
14926 IEM_MC_ARG(uint64_t, offSeg, 1);
14927 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
14928 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
14930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14931 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
14932 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
14933 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
14934 IEM_MC_END();
14935 return VINF_SUCCESS;
14936
14937 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14938 }
14939}
14940
14941
/**
 * Opcode 0xff /3 - 'callf Ep' (far indirect call via memory pointer).
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
14951
14952
14953/**
14954 * Opcode 0xff /4.
14955 * @param bRm The RM byte.
14956 */
14957FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
14958{
14959 IEMOP_MNEMONIC("jmpn Ev");
14960 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
14961 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
14962
14963 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14964 {
14965 /* The new RIP is taken from a register. */
14966 switch (pIemCpu->enmEffOpSize)
14967 {
14968 case IEMMODE_16BIT:
14969 IEM_MC_BEGIN(0, 1);
14970 IEM_MC_LOCAL(uint16_t, u16Target);
14971 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
14972 IEM_MC_SET_RIP_U16(u16Target);
14973 IEM_MC_END()
14974 return VINF_SUCCESS;
14975
14976 case IEMMODE_32BIT:
14977 IEM_MC_BEGIN(0, 1);
14978 IEM_MC_LOCAL(uint32_t, u32Target);
14979 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
14980 IEM_MC_SET_RIP_U32(u32Target);
14981 IEM_MC_END()
14982 return VINF_SUCCESS;
14983
14984 case IEMMODE_64BIT:
14985 IEM_MC_BEGIN(0, 1);
14986 IEM_MC_LOCAL(uint64_t, u64Target);
14987 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
14988 IEM_MC_SET_RIP_U64(u64Target);
14989 IEM_MC_END()
14990 return VINF_SUCCESS;
14991
14992 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14993 }
14994 }
14995 else
14996 {
14997 /* The new RIP is taken from a register. */
14998 switch (pIemCpu->enmEffOpSize)
14999 {
15000 case IEMMODE_16BIT:
15001 IEM_MC_BEGIN(0, 2);
15002 IEM_MC_LOCAL(uint16_t, u16Target);
15003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15004 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
15005 IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
15006 IEM_MC_SET_RIP_U16(u16Target);
15007 IEM_MC_END()
15008 return VINF_SUCCESS;
15009
15010 case IEMMODE_32BIT:
15011 IEM_MC_BEGIN(0, 2);
15012 IEM_MC_LOCAL(uint32_t, u32Target);
15013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
15015 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
15016 IEM_MC_SET_RIP_U32(u32Target);
15017 IEM_MC_END()
15018 return VINF_SUCCESS;
15019
15020 case IEMMODE_64BIT:
15021 IEM_MC_BEGIN(0, 2);
15022 IEM_MC_LOCAL(uint32_t, u32Target);
15023 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
15025 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
15026 IEM_MC_SET_RIP_U32(u32Target);
15027 IEM_MC_END()
15028 return VINF_SUCCESS;
15029
15030 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15031 }
15032 }
15033}
15034
15035
/**
 * Opcode 0xff /5 - 'jmpf Ep' (far indirect jump via memory pointer).
 *
 * NOTE(review): IEMOP_HLP_NO_64BIT() raises \#UD in 64-bit mode, but per the
 * architecture JMP m16:64 is valid there - presumably a deliberate stub until
 * iemCImpl_FarJmp handles long mode; confirm before removing.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmp Ep");
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
15046
15047
/**
 * Opcode 0xff /6 - 'push Ev'.
 *
 * Register operands go to the common push worker; memory operands are
 * fetched here and pushed per effective operand size.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("push Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
            IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
            IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
            IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15101
15102
15103/** Opcode 0xff. */
15104FNIEMOP_DEF(iemOp_Grp5)
15105{
15106 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15107 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15108 {
15109 case 0:
15110 IEMOP_MNEMONIC("inc Ev");
15111 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
15112 case 1:
15113 IEMOP_MNEMONIC("dec Ev");
15114 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
15115 case 2:
15116 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
15117 case 3:
15118 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
15119 case 4:
15120 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
15121 case 5:
15122 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
15123 case 6:
15124 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
15125 case 7:
15126 IEMOP_MNEMONIC("grp5-ud");
15127 return IEMOP_RAISE_INVALID_OPCODE();
15128 }
15129 AssertFailedReturn(VERR_INTERNAL_ERROR_2);
15130}
15131
15132
15133
/**
 * The one-byte opcode decoder function map.
 *
 * One entry per primary opcode byte (0x00 thru 0xff); the leading comment on
 * each row gives the opcode of the row's first entry.  Prefix bytes (segment
 * overrides, operand/address size, lock, rep/repne) and the 0x0f two-byte
 * escape are handled by dedicated decoder functions listed here just like
 * ordinary instructions.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma, iemOp_arpl_Ew_Gw,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp, iemOp_lds_Gv_Mp, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_Invalid, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_Invalid, iemOp_repne, iemOp_repe, /** @todo 0xf1 is INT1 / ICEBP. */
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
15201
15202
15203/** @} */
15204
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette