VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 96854

Last change on this file since 96854 was 96733, checked in by vboxsync, 2 years ago

VMM/IEM: fnsave modifies the FPU state, so actualize it for change rather than just read. bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 395.2 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 96733 2022-09-14 09:23:41Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
/* One-byte opcode dispatch table: one decoder function per primary opcode byte (0x00..0xff). */
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/* Instruction specification format - work in progress: */
60
61/**
62 * @opcode 0x00
63 * @opmnemonic add
64 * @op1 rm:Eb
65 * @op2 reg:Gb
66 * @opmaps one
67 * @openc ModR/M
68 * @opflmodify cf,pf,af,zf,sf,of
69 * @ophints harmless ignores_op_sizes
70 * @opstats add_Eb_Gb
71 * @opgroup og_gen_arith_bin
72 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
73 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
74 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
75 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
76 */
77FNIEMOP_DEF(iemOp_add_Eb_Gb)
78{
    /* ADD r/m8,r8 — defers to the shared Eb,Gb binary-op decoder with the ADD implementation. */
79 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
80 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
81}
82
83
84/**
85 * @opcode 0x01
86 * @opgroup og_gen_arith_bin
87 * @opflmodify cf,pf,af,zf,sf,of
88 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
89 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
90 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
91 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
92 */
93FNIEMOP_DEF(iemOp_add_Ev_Gv)
94{
    /* ADD r/m16/32/64,r16/32/64 — shared Ev,Gv decoder with the ADD implementation. */
95 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
96 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
97}
98
99
100/**
101 * @opcode 0x02
102 * @opgroup og_gen_arith_bin
103 * @opflmodify cf,pf,af,zf,sf,of
104 * @opcopytests iemOp_add_Eb_Gb
105 */
106FNIEMOP_DEF(iemOp_add_Gb_Eb)
107{
    /* ADD r8,r/m8 — register-destination direction; shared Gb,Eb decoder with the ADD implementation. */
108 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
109 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
110}
111
112
113/**
114 * @opcode 0x03
115 * @opgroup og_gen_arith_bin
116 * @opflmodify cf,pf,af,zf,sf,of
117 * @opcopytests iemOp_add_Ev_Gv
118 */
119FNIEMOP_DEF(iemOp_add_Gv_Ev)
120{
    /* ADD r16/32/64,r/m16/32/64 — shared Gv,Ev decoder with the ADD implementation. */
121 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
122 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
123}
124
125
126/**
127 * @opcode 0x04
128 * @opgroup og_gen_arith_bin
129 * @opflmodify cf,pf,af,zf,sf,of
130 * @opcopytests iemOp_add_Eb_Gb
131 */
132FNIEMOP_DEF(iemOp_add_Al_Ib)
133{
    /* ADD AL,imm8 — fixed-register form; shared AL,Ib decoder with the ADD implementation. */
134 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
135 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
136}
137
138
139/**
140 * @opcode 0x05
141 * @opgroup og_gen_arith_bin
142 * @opflmodify cf,pf,af,zf,sf,of
143 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
144 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
145 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
146 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
147 */
148FNIEMOP_DEF(iemOp_add_eAX_Iz)
149{
    /* ADD rAX,immZ — immediate width follows the effective operand size; shared rAX,Iz decoder. */
150 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
151 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
152}
153
154
155/**
156 * @opcode 0x06
157 * @opgroup og_stack_sreg
158 */
159FNIEMOP_DEF(iemOp_push_ES)
160{
    /* PUSH ES — invalid in 64-bit mode (raises #UD via IEMOP_HLP_NO_64BIT); shared push-sreg path. */
161 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
162 IEMOP_HLP_NO_64BIT();
163 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
164}
165
166
167/**
168 * @opcode 0x07
169 * @opgroup og_stack_sreg
170 */
171FNIEMOP_DEF(iemOp_pop_ES)
172{
    /* POP ES — invalid in 64-bit mode; segment-register loads are deferred to iemCImpl_pop_Sreg. */
173 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
174 IEMOP_HLP_NO_64BIT();
175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
176 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
177}
178
179
180/**
181 * @opcode 0x08
182 * @opgroup og_gen_arith_bin
183 * @opflmodify cf,pf,af,zf,sf,of
184 * @opflundef af
185 * @opflclear of,cf
186 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
187 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
188 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
189 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
190 */
191FNIEMOP_DEF(iemOp_or_Eb_Gb)
192{
    /* OR r/m8,r8 — AF is architecturally undefined for OR; flag that for the verifier. */
193 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
194 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
195 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
196}
197
198
199/**
200 * @opcode 0x09
201 * @opgroup og_gen_arith_bin
202 * @opflmodify cf,pf,af,zf,sf,of
203 * @opflundef af
204 * @opflclear of,cf
205 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
206 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
207 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
208 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
209 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
210 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
211 */
212FNIEMOP_DEF(iemOp_or_Ev_Gv)
213{
    /* OR r/m16/32/64,r16/32/64 — AF undefined for OR; shared Ev,Gv decoder. */
214 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
215 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
216 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
217}
218
219
220/**
221 * @opcode 0x0a
222 * @opgroup og_gen_arith_bin
223 * @opflmodify cf,pf,af,zf,sf,of
224 * @opflundef af
225 * @opflclear of,cf
226 * @opcopytests iemOp_or_Eb_Gb
227 */
228FNIEMOP_DEF(iemOp_or_Gb_Eb)
229{
    /* OR r8,r/m8 — register-destination direction; AF undefined for OR. */
230 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
231 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
232 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
233}
234
235
236/**
237 * @opcode 0x0b
238 * @opgroup og_gen_arith_bin
239 * @opflmodify cf,pf,af,zf,sf,of
240 * @opflundef af
241 * @opflclear of,cf
242 * @opcopytests iemOp_or_Ev_Gv
243 */
244FNIEMOP_DEF(iemOp_or_Gv_Ev)
245{
    /* OR r16/32/64,r/m16/32/64 — AF undefined for OR; shared Gv,Ev decoder. */
246 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
247 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
248 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
249}
250
251
252/**
253 * @opcode 0x0c
254 * @opgroup og_gen_arith_bin
255 * @opflmodify cf,pf,af,zf,sf,of
256 * @opflundef af
257 * @opflclear of,cf
258 * @opcopytests iemOp_or_Eb_Gb
259 */
260FNIEMOP_DEF(iemOp_or_Al_Ib)
261{
    /* OR AL,imm8 — fixed-register form; AF undefined for OR. */
262 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
263 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
264 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
265}
266
267
268/**
269 * @opcode 0x0d
270 * @opgroup og_gen_arith_bin
271 * @opflmodify cf,pf,af,zf,sf,of
272 * @opflundef af
273 * @opflclear of,cf
274 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
275 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
276 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
277 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
278 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
279 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
280 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
281 */
282FNIEMOP_DEF(iemOp_or_eAX_Iz)
283{
    /* OR rAX,immZ — immediate width follows effective operand size; AF undefined for OR. */
284 IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
285 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
286 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
287}
288
289
290/**
291 * @opcode 0x0e
292 * @opgroup og_stack_sreg
293 */
294FNIEMOP_DEF(iemOp_push_CS)
295{
    /* PUSH CS — invalid in 64-bit mode; shared push-sreg path. */
296 IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
297 IEMOP_HLP_NO_64BIT();
298 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
299}
300
301
302/**
303 * @opcode 0x0f
304 * @opmnemonic EscTwo0f
305 * @openc two0f
306 * @opdisenum OP_2B_ESC
307 * @ophints harmless
308 * @opgroup og_escapes
309 */
310FNIEMOP_DEF(iemOp_2byteEscape)
311{
312#ifdef VBOX_STRICT
313 /* Sanity check the table the first time around. */
314 static bool s_fTested = false;
315 if (RT_LIKELY(s_fTested)) { /* likely */ }
316 else
317 {
318 s_fTested = true;
319 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
320 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
321 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
322 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
323 }
324#endif
325
326 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
327 {
328 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
329 IEMOP_HLP_MIN_286();
330 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
331 }
332 /* @opdone */
333
334 /*
335 * On the 8086 this is a POP CS instruction.
336 * For the time being we don't specify this this.
337 */
338 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
339 IEMOP_HLP_NO_64BIT();
340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
341 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
342}
343
344/**
345 * @opcode 0x10
346 * @opgroup og_gen_arith_bin
347 * @opfltest cf
348 * @opflmodify cf,pf,af,zf,sf,of
349 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
350 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
351 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
352 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
353 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
354 */
355FNIEMOP_DEF(iemOp_adc_Eb_Gb)
356{
    /* ADC r/m8,r8 — add with carry-in; shared Eb,Gb decoder with the ADC implementation. */
357 IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
358 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
359}
360
361
362/**
363 * @opcode 0x11
364 * @opgroup og_gen_arith_bin
365 * @opfltest cf
366 * @opflmodify cf,pf,af,zf,sf,of
367 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
368 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
369 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
370 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
371 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
372 */
373FNIEMOP_DEF(iemOp_adc_Ev_Gv)
374{
    /* ADC r/m16/32/64,r16/32/64 — shared Ev,Gv decoder with the ADC implementation. */
375 IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
376 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
377}
378
379
380/**
381 * @opcode 0x12
382 * @opgroup og_gen_arith_bin
383 * @opfltest cf
384 * @opflmodify cf,pf,af,zf,sf,of
385 * @opcopytests iemOp_adc_Eb_Gb
386 */
387FNIEMOP_DEF(iemOp_adc_Gb_Eb)
388{
    /* ADC r8,r/m8 — register-destination direction. */
389 IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
390 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
391}
392
393
394/**
395 * @opcode 0x13
396 * @opgroup og_gen_arith_bin
397 * @opfltest cf
398 * @opflmodify cf,pf,af,zf,sf,of
399 * @opcopytests iemOp_adc_Ev_Gv
400 */
401FNIEMOP_DEF(iemOp_adc_Gv_Ev)
402{
    /* ADC r16/32/64,r/m16/32/64 — register-destination direction. */
403 IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
404 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
405}
406
407
408/**
409 * @opcode 0x14
410 * @opgroup og_gen_arith_bin
411 * @opfltest cf
412 * @opflmodify cf,pf,af,zf,sf,of
413 * @opcopytests iemOp_adc_Eb_Gb
414 */
415FNIEMOP_DEF(iemOp_adc_Al_Ib)
416{
    /* ADC AL,imm8 — fixed-register form. */
417 IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
418 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
419}
420
421
422/**
423 * @opcode 0x15
424 * @opgroup og_gen_arith_bin
425 * @opfltest cf
426 * @opflmodify cf,pf,af,zf,sf,of
427 * @opcopytests iemOp_adc_Ev_Gv
428 */
429FNIEMOP_DEF(iemOp_adc_eAX_Iz)
430{
    /* ADC rAX,immZ — immediate width follows effective operand size. */
431 IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
432 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
433}
434
435
436/**
437 * @opcode 0x16
438 */
439FNIEMOP_DEF(iemOp_push_SS)
440{
    /* PUSH SS — invalid in 64-bit mode; shared push-sreg path. */
441 IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
442 IEMOP_HLP_NO_64BIT();
443 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
444}
445
446
447/**
 448 * @opcode 0x17
 449 * @opgroup og_stack_sreg
 450 */
453FNIEMOP_DEF(iemOp_pop_SS)
454{
    /* POP SS — inhibits interrupts for one instruction (DISOPTYPE_INHIBIT_IRQS); invalid in
       64-bit mode; the actual segment load is deferred to iemCImpl_pop_Sreg. */
455 IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
457 IEMOP_HLP_NO_64BIT();
458 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
459}
460
461
462/**
463 * @opcode 0x18
464 * @opgroup og_gen_arith_bin
465 * @opfltest cf
466 * @opflmodify cf,pf,af,zf,sf,of
467 */
468FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
469{
    /* SBB r/m8,r8 — subtract with borrow-in; shared Eb,Gb decoder with the SBB implementation. */
470 IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
471 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
472}
473
474
475/**
476 * @opcode 0x19
477 * @opgroup og_gen_arith_bin
478 * @opfltest cf
479 * @opflmodify cf,pf,af,zf,sf,of
480 */
481FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
482{
    /* SBB r/m16/32/64,r16/32/64 — shared Ev,Gv decoder with the SBB implementation. */
483 IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
484 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
485}
486
487
488/**
489 * @opcode 0x1a
490 * @opgroup og_gen_arith_bin
491 * @opfltest cf
492 * @opflmodify cf,pf,af,zf,sf,of
493 */
494FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
495{
    /* SBB r8,r/m8 — register-destination direction. */
496 IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
497 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
498}
499
500
501/**
502 * @opcode 0x1b
503 * @opgroup og_gen_arith_bin
504 * @opfltest cf
505 * @opflmodify cf,pf,af,zf,sf,of
506 */
507FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
508{
    /* SBB r16/32/64,r/m16/32/64 — register-destination direction. */
509 IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
510 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
511}
512
513
514/**
515 * @opcode 0x1c
516 * @opgroup og_gen_arith_bin
517 * @opfltest cf
518 * @opflmodify cf,pf,af,zf,sf,of
519 */
520FNIEMOP_DEF(iemOp_sbb_Al_Ib)
521{
    /* SBB AL,imm8 — fixed-register form. */
522 IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
523 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
524}
525
526
527/**
528 * @opcode 0x1d
529 * @opgroup og_gen_arith_bin
530 * @opfltest cf
531 * @opflmodify cf,pf,af,zf,sf,of
532 */
533FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
534{
    /* SBB rAX,immZ — immediate width follows effective operand size. */
535 IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
536 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
537}
538
539
540/**
541 * @opcode 0x1e
542 * @opgroup og_stack_sreg
543 */
544FNIEMOP_DEF(iemOp_push_DS)
545{
    /* PUSH DS — invalid in 64-bit mode; shared push-sreg path. */
546 IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
547 IEMOP_HLP_NO_64BIT();
548 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
549}
550
551
552/**
553 * @opcode 0x1f
554 * @opgroup og_stack_sreg
555 */
556FNIEMOP_DEF(iemOp_pop_DS)
557{
    /* POP DS — invalid in 64-bit mode; segment load deferred to iemCImpl_pop_Sreg. */
558 IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
560 IEMOP_HLP_NO_64BIT();
561 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
562}
563
564
565/**
566 * @opcode 0x20
567 * @opgroup og_gen_arith_bin
568 * @opflmodify cf,pf,af,zf,sf,of
569 * @opflundef af
570 * @opflclear of,cf
571 */
572FNIEMOP_DEF(iemOp_and_Eb_Gb)
573{
    /* AND r/m8,r8 — AF is architecturally undefined for AND; flag that for the verifier. */
574 IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
575 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
576 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
577}
578
579
580/**
581 * @opcode 0x21
582 * @opgroup og_gen_arith_bin
583 * @opflmodify cf,pf,af,zf,sf,of
584 * @opflundef af
585 * @opflclear of,cf
586 */
587FNIEMOP_DEF(iemOp_and_Ev_Gv)
588{
    /* AND r/m16/32/64,r16/32/64 — AF undefined for AND. */
589 IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
590 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
591 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
592}
593
594
595/**
596 * @opcode 0x22
597 * @opgroup og_gen_arith_bin
598 * @opflmodify cf,pf,af,zf,sf,of
599 * @opflundef af
600 * @opflclear of,cf
601 */
602FNIEMOP_DEF(iemOp_and_Gb_Eb)
603{
    /* AND r8,r/m8 — register-destination direction; AF undefined for AND. */
604 IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
605 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
606 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
607}
608
609
610/**
611 * @opcode 0x23
612 * @opgroup og_gen_arith_bin
613 * @opflmodify cf,pf,af,zf,sf,of
614 * @opflundef af
615 * @opflclear of,cf
616 */
617FNIEMOP_DEF(iemOp_and_Gv_Ev)
618{
    /* AND r16/32/64,r/m16/32/64 — AF undefined for AND. */
619 IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
620 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
621 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
622}
623
624
625/**
626 * @opcode 0x24
627 * @opgroup og_gen_arith_bin
628 * @opflmodify cf,pf,af,zf,sf,of
629 * @opflundef af
630 * @opflclear of,cf
631 */
632FNIEMOP_DEF(iemOp_and_Al_Ib)
633{
634 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
635 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
636 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
637}
638
639
640/**
641 * @opcode 0x25
642 * @opgroup og_gen_arith_bin
643 * @opflmodify cf,pf,af,zf,sf,of
644 * @opflundef af
645 * @opflclear of,cf
646 */
647FNIEMOP_DEF(iemOp_and_eAX_Iz)
648{
    /* AND rAX,immZ — immediate width follows effective operand size; AF undefined for AND. */
649 IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
650 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
651 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
652}
653
654
655/**
656 * @opcode 0x26
657 * @opmnemonic SEG
658 * @op1 ES
659 * @opgroup og_prefix
660 * @openc prefix
661 * @opdisenum OP_SEG
662 * @ophints harmless
663 */
664FNIEMOP_DEF(iemOp_seg_ES)
665{
    /* ES segment-override prefix: record the prefix and effective segment, then
       decode the following opcode byte through the one-byte map. */
666 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
667 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
668 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
669
670 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
671 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
672}
673
674
675/**
676 * @opcode 0x27
677 * @opfltest af,cf
678 * @opflmodify cf,pf,af,zf,sf,of
679 * @opflundef of
680 */
681FNIEMOP_DEF(iemOp_daa)
682{
    /* DAA — decimal adjust AL after addition; invalid in 64-bit mode; OF is
       architecturally undefined; the work is deferred to iemCImpl_daa. */
683 IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
684 IEMOP_HLP_NO_64BIT();
685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
686 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
687 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
688}
689
690
691/**
692 * @opcode 0x28
693 * @opgroup og_gen_arith_bin
694 * @opflmodify cf,pf,af,zf,sf,of
695 */
696FNIEMOP_DEF(iemOp_sub_Eb_Gb)
697{
    /* SUB r/m8,r8 — shared Eb,Gb decoder with the SUB implementation. */
698 IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
699 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
700}
701
702
703/**
704 * @opcode 0x29
705 * @opgroup og_gen_arith_bin
706 * @opflmodify cf,pf,af,zf,sf,of
707 */
708FNIEMOP_DEF(iemOp_sub_Ev_Gv)
709{
    /* SUB r/m16/32/64,r16/32/64 — shared Ev,Gv decoder with the SUB implementation. */
710 IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
711 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
712}
713
714
715/**
716 * @opcode 0x2a
717 * @opgroup og_gen_arith_bin
718 * @opflmodify cf,pf,af,zf,sf,of
719 */
720FNIEMOP_DEF(iemOp_sub_Gb_Eb)
721{
    /* SUB r8,r/m8 — register-destination direction. */
722 IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
723 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
724}
725
726
727/**
728 * @opcode 0x2b
729 * @opgroup og_gen_arith_bin
730 * @opflmodify cf,pf,af,zf,sf,of
731 */
732FNIEMOP_DEF(iemOp_sub_Gv_Ev)
733{
    /* SUB r16/32/64,r/m16/32/64 — register-destination direction. */
734 IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
735 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
736}
737
738
739/**
740 * @opcode 0x2c
741 * @opgroup og_gen_arith_bin
742 * @opflmodify cf,pf,af,zf,sf,of
743 */
744FNIEMOP_DEF(iemOp_sub_Al_Ib)
745{
    /* SUB AL,imm8 — fixed-register form. */
746 IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
747 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
748}
749
750
751/**
752 * @opcode 0x2d
753 * @opgroup og_gen_arith_bin
754 * @opflmodify cf,pf,af,zf,sf,of
755 */
756FNIEMOP_DEF(iemOp_sub_eAX_Iz)
757{
    /* SUB rAX,immZ — immediate width follows effective operand size. */
758 IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
759 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
760}
761
762
763/**
764 * @opcode 0x2e
765 * @opmnemonic SEG
766 * @op1 CS
767 * @opgroup og_prefix
768 * @openc prefix
769 * @opdisenum OP_SEG
770 * @ophints harmless
771 */
772FNIEMOP_DEF(iemOp_seg_CS)
773{
    /* CS segment-override prefix: record it, then decode the following opcode byte. */
774 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
775 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
776 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
777
778 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
779 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
780}
781
782
783/**
784 * @opcode 0x2f
785 * @opfltest af,cf
786 * @opflmodify cf,pf,af,zf,sf,of
787 * @opflundef of
788 */
789FNIEMOP_DEF(iemOp_das)
790{
    /* DAS — decimal adjust AL after subtraction; invalid in 64-bit mode; OF is
       architecturally undefined; deferred to iemCImpl_das. */
791 IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
792 IEMOP_HLP_NO_64BIT();
793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
794 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
795 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
796}
797
798
799/**
800 * @opcode 0x30
801 * @opgroup og_gen_arith_bin
802 * @opflmodify cf,pf,af,zf,sf,of
803 * @opflundef af
804 * @opflclear of,cf
805 */
806FNIEMOP_DEF(iemOp_xor_Eb_Gb)
807{
    /* XOR r/m8,r8 — AF is architecturally undefined for XOR; flag that for the verifier. */
808 IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
809 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
810 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
811}
812
813
814/**
815 * @opcode 0x31
816 * @opgroup og_gen_arith_bin
817 * @opflmodify cf,pf,af,zf,sf,of
818 * @opflundef af
819 * @opflclear of,cf
820 */
821FNIEMOP_DEF(iemOp_xor_Ev_Gv)
822{
    /* XOR r/m16/32/64,r16/32/64 — AF undefined for XOR. */
823 IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
824 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
825 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
826}
827
828
829/**
830 * @opcode 0x32
831 * @opgroup og_gen_arith_bin
832 * @opflmodify cf,pf,af,zf,sf,of
833 * @opflundef af
834 * @opflclear of,cf
835 */
836FNIEMOP_DEF(iemOp_xor_Gb_Eb)
837{
    /* XOR r8,r/m8 — register-destination direction; AF undefined for XOR. */
838 IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
839 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
840 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
841}
842
843
844/**
845 * @opcode 0x33
846 * @opgroup og_gen_arith_bin
847 * @opflmodify cf,pf,af,zf,sf,of
848 * @opflundef af
849 * @opflclear of,cf
850 */
851FNIEMOP_DEF(iemOp_xor_Gv_Ev)
852{
    /* XOR r16/32/64,r/m16/32/64 — AF undefined for XOR. */
853 IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
854 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
855 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
856}
857
858
859/**
860 * @opcode 0x34
861 * @opgroup og_gen_arith_bin
862 * @opflmodify cf,pf,af,zf,sf,of
863 * @opflundef af
864 * @opflclear of,cf
865 */
866FNIEMOP_DEF(iemOp_xor_Al_Ib)
867{
    /* XOR AL,imm8 — fixed-register form; AF undefined for XOR. */
868 IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
869 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
870 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
871}
872
873
874/**
875 * @opcode 0x35
876 * @opgroup og_gen_arith_bin
877 * @opflmodify cf,pf,af,zf,sf,of
878 * @opflundef af
879 * @opflclear of,cf
880 */
881FNIEMOP_DEF(iemOp_xor_eAX_Iz)
882{
883 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
884 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
885 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
886}
887
888
889/**
890 * @opcode 0x36
891 * @opmnemonic SEG
892 * @op1 SS
893 * @opgroup og_prefix
894 * @openc prefix
895 * @opdisenum OP_SEG
896 * @ophints harmless
897 */
898FNIEMOP_DEF(iemOp_seg_SS)
899{
    /* SS segment-override prefix: record it, then decode the following opcode byte. */
900 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
901 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
902 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
903
904 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
905 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
906}
907
908
909/**
910 * @opcode 0x37
911 * @opfltest af,cf
912 * @opflmodify cf,pf,af,zf,sf,of
913 * @opflundef pf,zf,sf,of
914 * @opgroup og_gen_arith_dec
915 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
916 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
917 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
918 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
919 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
920 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
921 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
922 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
923 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
924 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
925 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
926 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
927 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
928 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
929 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
930 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
931 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
932 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
933 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
934 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
935 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
936 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
937 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
938 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
939 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
940 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
941 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
942 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
943 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
944 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
945 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
946 */
947FNIEMOP_DEF(iemOp_aaa)
948{
    /* AAA — ASCII adjust AL/AX after addition; invalid in 64-bit mode; OF is
       architecturally undefined (Intel/AMD differ — see @optest table above);
       the work is deferred to iemCImpl_aaa. */
949 IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
950 IEMOP_HLP_NO_64BIT();
951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
952 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
953
954 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
955}
956
957
958/**
959 * @opcode 0x38
960 */
961FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
962{
    /* CMP r/m8,r8 — compare (subtract without writing the result; only EFLAGS update). */
963 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
964 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
965}
966
967
968/**
969 * @opcode 0x39
970 */
971FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
972{
    /* CMP r/m16/32/64,r16/32/64 — flags-only compare via the shared Ev,Gv decoder. */
973 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
974 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
975}
976
977
978/**
979 * @opcode 0x3a
980 */
981FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
982{
    /* CMP r8,r/m8 — register-first direction; flags-only compare. */
983 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
984 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
985}
986
987
988/**
989 * @opcode 0x3b
990 */
991FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
992{
    /* CMP r16/32/64,r/m16/32/64 — register-first direction; flags-only compare. */
993 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
994 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
995}
996
997
998/**
999 * @opcode 0x3c
1000 */
1001FNIEMOP_DEF(iemOp_cmp_Al_Ib)
1002{
    /* CMP AL,imm8 — fixed-register form; flags-only compare. */
1003 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
1004 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
1005}
1006
1007
1008/**
1009 * @opcode 0x3d
1010 */
1011FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
1012{
    /* CMP rAX,immZ — immediate width follows effective operand size; flags-only compare. */
1013 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
1014 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
1015}
1016
1017
1018/**
1019 * @opcode 0x3e
1020 */
1021FNIEMOP_DEF(iemOp_seg_DS)
1022{
    /* DS segment-override prefix: record it, then decode the following opcode byte. */
1023 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
1024 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
1025 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
1026
1027 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1028 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1029}
1030
1031
1032/**
1033 * @opcode 0x3f
1034 * @opfltest af,cf
1035 * @opflmodify cf,pf,af,zf,sf,of
1036 * @opflundef pf,zf,sf,of
1037 * @opgroup og_gen_arith_dec
1038 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1039 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1040 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1041 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1042 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1043 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1044 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1045 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1046 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1047 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1048 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1049 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1050 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1051 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1052 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1053 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1054 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1055 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1056 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1057 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1058 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1059 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1060 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1061 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1062 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1063 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1064 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1065 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1066 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1067 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1068 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1069 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1070 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1071 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1072 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1073 */
1074FNIEMOP_DEF(iemOp_aas)
1075{
1076 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
1077 IEMOP_HLP_NO_64BIT();
1078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1079 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1080
1081 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
1082}
1083
1084
1085/**
1086 * Common 'inc/dec/not/neg register' helper.
1087 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Dispatch on effective operand size; pImpl supplies the size-specific worker. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit GPR writes zero the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    return VINF_SUCCESS; /* not reachable; keeps the compiler happy */
}
1129
1130
1131/**
1132 * @opcode 0x40
1133 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        /* Decode the opcode that follows the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
1151
1152
1153/**
1154 * @opcode 0x41
1155 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3; /* REX.B: bit 3 of the rm/base/opcode register field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
1174
1175
1176/**
1177 * @opcode 0x42
1178 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: bit 3 of the SIB index register field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
1197
1198
1199
1200/**
1201 * @opcode 0x43
1202 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        /* REX.B + REX.X: extend both the rm/base and SIB index register fields. */
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
1222
1223
1224/**
1225 * @opcode 0x44
1226 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R: bit 3 of the ModR/M reg field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
1245
1246
1247/**
1248 * @opcode 0x45
1249 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        /* REX.R + REX.B: extend the reg and rm/base register fields. */
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
1269
1270
1271/**
1272 * @opcode 0x46
1273 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        /* REX.R + REX.X: extend the reg and SIB index register fields. */
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
1293
1294
1295/**
1296 * @opcode 0x47
1297 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        /* REX.R + REX.B + REX.X: extend reg, rm/base and SIB index register fields. */
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
1318
1319
1320/**
1321 * @opcode 0x48
1322 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
1341
1342
1343/**
1344 * @opcode 0x49
1345 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
1365
1366
1367/**
1368 * @opcode 0x4a
1369 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
1389
1390
1391/**
1392 * @opcode 0x4b
1393 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
1414
1415
1416/**
1417 * @opcode 0x4c
1418 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
1438
1439
1440/**
1441 * @opcode 0x4d
1442 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
1463
1464
1465/**
1466 * @opcode 0x4e
1467 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
1488
1489
1490/**
1491 * @opcode 0x4f
1492 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        /* All four REX extension bits set. */
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
1514
1515
1516/**
1517 * Common 'push register' helper.
1518 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* Apply REX.B register extension; pushes default to 64-bit operand
           size in long mode (66h prefix selects 16-bit, never 32-bit). */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1561
1562
1563/**
1564 * @opcode 0x50
1565 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* Shared helper handles operand size and REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
1571
1572
1573/**
1574 * @opcode 0x51
1575 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* Shared helper handles operand size and REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
1581
1582
1583/**
1584 * @opcode 0x52
1585 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* Shared helper handles operand size and REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
1591
1592
1593/**
1594 * @opcode 0x53
1595 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* Shared helper handles operand size and REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1601
1602
1603/**
1604 * @opcode 0x54
1605 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        /* 8086 quirk: "push sp" stores the value of SP *after* the
           decrement, i.e. the original SP minus 2. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* 80286 and later push the pre-decrement SP value. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
1621
1622
1623/**
1624 * @opcode 0x55
1625 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* Shared helper handles operand size and REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
1631
1632
1633/**
1634 * @opcode 0x56
1635 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* Shared helper handles operand size and REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
1641
1642
1643/**
1644 * @opcode 0x57
1645 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* Shared helper handles operand size and REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1651
1652
1653/**
1654 * Common 'pop register' helper.
1655 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* Apply REX.B register extension; pops default to 64-bit operand
           size in long mode (66h prefix selects 16-bit, never 32-bit). */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1699
1700
1701/**
1702 * @opcode 0x58
1703 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* Shared helper handles operand size and REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
1709
1710
1711/**
1712 * @opcode 0x59
1713 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* Shared helper handles operand size and REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
1719
1720
1721/**
1722 * @opcode 0x5a
1723 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* Shared helper handles operand size and REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
1729
1730
1731/**
1732 * @opcode 0x5b
1733 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* Shared helper handles operand size and REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
1739
1740
1741/**
1742 * @opcode 0x5c
1743 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this is "pop r8" and the common helper handles it;
           plain "pop rsp" needs the special sequence below because the
           popped value must replace RSP after the stack-pointer increment. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    /* Pop into a local first, then store it as the new stack pointer. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1790
1791
1792/**
1793 * @opcode 0x5d
1794 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* Shared helper handles operand size and REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1800
1801
1802/**
1803 * @opcode 0x5e
1804 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* Shared helper handles operand size and REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1810
1811
1812/**
1813 * @opcode 0x5f
1814 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* Shared helper handles operand size and REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1820
1821
1822/**
1823 * @opcode 0x60
1824 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    /* Defer to C implementation matching the effective operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1835
1836
1837/**
1838 * @opcode 0x61
1839 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* Outside 64-bit mode 0x61 is POPA; in 64-bit mode it would be the
       MVEX prefix (Knights Corner), which is not supported here. */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1856
1857
1858/**
1859 * @opcode 0x62
1860 * @opmnemonic bound
1861 * @op1 Gv_RO
1862 * @op2 Ma
1863 * @opmincpu 80186
1864 * @ophints harmless invalid_64
1865 * @optest op1=0 op2=0 ->
1866 * @optest op1=1 op2=0 -> value.xcpt=5
1867 * @optest o16 / op1=0xffff op2=0x0000fffe ->
1868 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
1869 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
1870 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
1871 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
1872 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
1873 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
1874 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
1875 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
1876 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
1877 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
1878 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
1879 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
1880 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
1881 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
1882 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
1883 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
1884 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
1885 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
1886 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
1887 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
1888 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
1889 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
1890 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
1891 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
1892 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
1893 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
1894 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
1895 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
1896 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
1897 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
1898 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
1899 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
1900 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
1901 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
1902 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
1903 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
1904 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
1905 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
1906 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
1907 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
1908 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
1909 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Bounds pair lives at [mem16] (lower) and [mem16+2] (upper). */
                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Bounds pair lives at [mem32] (lower) and [mem32+4] (upper). */
                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX path: consume the remaining two payload bytes, then give up. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1997
1998
1999/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t,   u16Src,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory: map destination read/write since ARPL may adjust its RPL bits. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t,   u16Src,  1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
2048
2049
2050/**
2051 * @opcode 0x63
2052 *
2053 * @note This is a weird one. It works like a regular move instruction if
2054 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2055 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register to register: sign-extend the 32-bit source into 64 bits.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2093
2094
2095/**
2096 * @opcode 0x64
2097 * @opmnemonic segfs
2098 * @opmincpu 80386
2099 * @opgroup og_prefixes
2100 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    /* Prefix byte: record the FS segment override, then decode the following opcode. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2112
2113
2114/**
2115 * @opcode 0x65
2116 * @opmnemonic seggs
2117 * @opmincpu 80386
2118 * @opgroup og_prefixes
2119 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    /* Record the GS segment override and make it the effective segment. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    /* Prefix byte consumed; decode the next opcode byte via the one-byte map. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2131
2132
2133/**
2134 * @opcode 0x66
2135 * @opmnemonic opsize
2136 * @openc prefix
2137 * @opmincpu 80386
2138 * @ophints harmless
2139 * @opgroup og_prefixes
2140 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and recompute the effective operand size. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present, so only claim table index 1 if no
       other prefix has set idxPrefix yet. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2157
2158
2159/**
2160 * @opcode 0x67
2161 * @opmnemonic addrsize
2162 * @openc prefix
2163 * @opmincpu 80386
2164 * @ophints harmless
2165 * @opgroup og_prefixes
2166 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    /* The prefix selects the alternative address size for the default mode:
       16 <-> 32 toggling, and 64 -> 32 in long mode. */
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    /* Prefix byte consumed; decode the next opcode byte via the one-byte map. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2184
2185
2186/**
2187 * @opcode 0x68
2188 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* No 64-bit immediate form exists; a 32-bit immediate is fetched
               and sign-extended to 64 bits before pushing. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2232
2233
2234/**
2235 * @opcode 0x69
2236 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF, ZF, AF and PF are left undefined by this instruction. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Work on a local copy so the destination register is only
                   written after the worker has produced the product. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The 2 = number of immediate bytes still to be fetched after
                   the ModR/M encoding (needed for correct RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 immediate bytes follow the ModR/M encoding here. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                /* The immediate is 32 bits, sign-extended to 64 (no imm64 form). */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 immediate bytes (imm32, sign-extended) follow the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
2401
2402
2403/**
2404 * @opcode 0x6a
2405 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    /* The signed byte immediate is implicitly sign-extended to the
       effective operand size by the push below. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2431
2432
2433/**
2434 * @opcode 0x6b
2435 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF, ZF, AF and PF are left undefined by this instruction. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* The byte immediate is sign-extended to the operand size. */
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Work on a local copy so the destination register is only
                   written after the worker has produced the product. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 1 immediate byte follows the ModR/M encoding (needed for
                   correct RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
2594
2595
2596/**
2597 * @opcode 0x6c
2598 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Both F3 (REP) and F2 (REPNZ) select the repeating variant. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        /* Dispatch on address size to the C implementation (I/O + MMIO handling). */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2626
2627
2628/**
2629 * @opcode 0x6d
2630 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Both F3 (REP) and F2 (REPNZ) select the repeating variant. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            /* A 64-bit operand size is handled by the 32-bit implementation
               (there is no 64-bit port I/O width). */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2690
2691
2692/**
2693 * @opcode 0x6e
2694 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Both F3 (REP) and F2 (REPNZ) select the repeating variant. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        /* The effective segment is passed on, as the source operand honours
           segment override prefixes. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2722
2723
2724/**
2725 * @opcode 0x6f
2726 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Both F3 (REP) and F2 (REPNZ) select the repeating variant. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            /* A 64-bit operand size is handled by the 32-bit implementation
               (there is no 64-bit port I/O width). */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2786
2787
2788/**
2789 * @opcode 0x70
2790 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Taken when EFLAGS.OF is set; the jump is RIP-relative by the signed imm8. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2807
2808
2809/**
2810 * @opcode 0x71
2811 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: OF is tested non-negated and the arms are swapped. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2828
2829/**
2830 * @opcode 0x72
2831 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Taken when EFLAGS.CF is set. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2848
2849
2850/**
2851 * @opcode 0x73
2852 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: CF is tested non-negated and the arms are swapped. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2869
2870
2871/**
2872 * @opcode 0x74
2873 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Taken when EFLAGS.ZF is set. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2890
2891
2892/**
2893 * @opcode 0x75
2894 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: ZF is tested non-negated and the arms are swapped. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2911
2912
2913/**
2914 * @opcode 0x76
2915 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Taken when CF or ZF (or both) are set. */
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2932
2933
2934/**
2935 * @opcode 0x77
2936 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: CF|ZF is tested non-negated and the arms are swapped. */
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2953
2954
2955/**
2956 * @opcode 0x78
2957 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Taken when EFLAGS.SF is set. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2974
2975
2976/**
2977 * @opcode 0x79
2978 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: SF is tested non-negated and the arms are swapped. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2995
2996
2997/**
2998 * @opcode 0x7a
2999 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Taken when EFLAGS.PF is set. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3016
3017
3018/**
3019 * @opcode 0x7b
3020 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: PF is tested non-negated and the arms are swapped. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3037
3038
3039/**
3040 * @opcode 0x7c
3041 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Taken when SF != OF (signed less-than). */
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3058
3059
3060/**
3061 * @opcode 0x7d
3062 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: SF != OF is tested non-negated and the arms are swapped. */
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3079
3080
3081/**
3082 * @opcode 0x7e
3083 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Taken when ZF is set or SF != OF (signed less-or-equal). */
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3100
3101
3102/**
3103 * @opcode 0x7f
3104 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: ZF||SF!=OF is tested non-negated and the arms are swapped. */
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3121
3122
3123/**
3124 * @opcode 0x80
3125 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The ModR/M reg field selects which of the eight group 1 operations this is. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* Operations with a locked worker modify their destination, while
           CMP (no locked worker) only reads it, so map the memory accordingly. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The trailing 1 = number of immediate bytes following the ModR/M
           encoding (needed for correct RIP-relative addressing). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        /* A LOCK prefix is only acceptable for the read-modify-write forms. */
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3194
3195
3196/**
3197 * @opcode 0x81
 *
 * Group 1, full-size immediate: add/or/adc/sbb/and/sub/xor/cmp Ev,Iz.
 * The ModR/M reg field selects the operation; the matching worker table
 * entry is looked up in g_apIemImplGrp1.  For 64-bit operand size the
 * immediate is a sign-extended 32-bit value (Iz), see below.
3198 */
3199FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
3200{
3201 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3202 switch (IEM_GET_MODRM_REG_8(bRm))
3203 {
3204 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
3205 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
3206 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
3207 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
3208 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
3209 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
3210 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
3211 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
3212 }
3213 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];
3214
3215 switch (pVCpu->iem.s.enmEffOpSize)
3216 {
3217 case IEMMODE_16BIT:
3218 {
3219 if (IEM_IS_MODRM_REG_MODE(bRm))
3220 {
3221 /* register target */
3222 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3224 IEM_MC_BEGIN(3, 0);
3225 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3226 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
3227 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3228
3229 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3230 IEM_MC_REF_EFLAGS(pEFlags);
3231 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3232
3233 IEM_MC_ADVANCE_RIP();
3234 IEM_MC_END();
3235 }
3236 else
3237 {
3238 /* memory target */
3239 uint32_t fAccess;
3240 if (pImpl->pfnLockedU16)
3241 fAccess = IEM_ACCESS_DATA_RW;
3242 else /* CMP - only reads the destination, no locked worker. */
3243 fAccess = IEM_ACCESS_DATA_R;
3244 IEM_MC_BEGIN(3, 2);
3245 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3246 IEM_MC_ARG(uint16_t, u16Src, 1);
3247 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3248 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3249
 /* The trailing immediate size (2 bytes here) is passed to the
 effective address calculation before the immediate is fetched. */
3250 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
3251 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3252 IEM_MC_ASSIGN(u16Src, u16Imm);
 /* A LOCK prefix is only acceptable when a locked worker exists
 (i.e. not for CMP); otherwise it raises an exception. */
3253 if (pImpl->pfnLockedU16)
3254 IEMOP_HLP_DONE_DECODING();
3255 else
3256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3257 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3258 IEM_MC_FETCH_EFLAGS(EFlags);
3259 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3260 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3261 else
3262 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3263
3264 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3265 IEM_MC_COMMIT_EFLAGS(EFlags);
3266 IEM_MC_ADVANCE_RIP();
3267 IEM_MC_END();
3268 }
3269 break;
3270 }
3271
3272 case IEMMODE_32BIT:
3273 {
3274 if (IEM_IS_MODRM_REG_MODE(bRm))
3275 {
3276 /* register target */
3277 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3279 IEM_MC_BEGIN(3, 0);
3280 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3281 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
3282 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3283
3284 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3285 IEM_MC_REF_EFLAGS(pEFlags);
3286 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
 /* 32-bit register results zero the high half of the 64-bit GPR. */
3287 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3288
3289 IEM_MC_ADVANCE_RIP();
3290 IEM_MC_END();
3291 }
3292 else
3293 {
3294 /* memory target */
3295 uint32_t fAccess;
3296 if (pImpl->pfnLockedU32)
3297 fAccess = IEM_ACCESS_DATA_RW;
3298 else /* CMP - only reads the destination, no locked worker. */
3299 fAccess = IEM_ACCESS_DATA_R;
3300 IEM_MC_BEGIN(3, 2);
3301 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3302 IEM_MC_ARG(uint32_t, u32Src, 1);
3303 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3305
3306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3307 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3308 IEM_MC_ASSIGN(u32Src, u32Imm);
3309 if (pImpl->pfnLockedU32)
3310 IEMOP_HLP_DONE_DECODING();
3311 else
3312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3313 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3314 IEM_MC_FETCH_EFLAGS(EFlags);
3315 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3316 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3317 else
3318 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3319
3320 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3321 IEM_MC_COMMIT_EFLAGS(EFlags);
3322 IEM_MC_ADVANCE_RIP();
3323 IEM_MC_END();
3324 }
3325 break;
3326 }
3327
3328 case IEMMODE_64BIT:
3329 {
3330 if (IEM_IS_MODRM_REG_MODE(bRm))
3331 {
3332 /* register target */
 /* 64-bit mode: Iz is a 32-bit immediate sign-extended to 64 bits. */
3333 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3335 IEM_MC_BEGIN(3, 0);
3336 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3337 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
3338 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3339
3340 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3341 IEM_MC_REF_EFLAGS(pEFlags);
3342 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3343
3344 IEM_MC_ADVANCE_RIP();
3345 IEM_MC_END();
3346 }
3347 else
3348 {
3349 /* memory target */
3350 uint32_t fAccess;
3351 if (pImpl->pfnLockedU64)
3352 fAccess = IEM_ACCESS_DATA_RW;
3353 else /* CMP */
3354 fAccess = IEM_ACCESS_DATA_R;
3355 IEM_MC_BEGIN(3, 2);
3356 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3357 IEM_MC_ARG(uint64_t, u64Src, 1);
3358 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3360
 /* Immediate is still 4 bytes on the wire even for 64-bit operands. */
3361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3362 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3363 if (pImpl->pfnLockedU64)
3364 IEMOP_HLP_DONE_DECODING();
3365 else
3366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3367 IEM_MC_ASSIGN(u64Src, u64Imm);
3368 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3369 IEM_MC_FETCH_EFLAGS(EFlags);
3370 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3371 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3372 else
3373 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3374
3375 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3376 IEM_MC_COMMIT_EFLAGS(EFlags);
3377 IEM_MC_ADVANCE_RIP();
3378 IEM_MC_END();
3379 }
3380 break;
3381 }
3382 }
3383 return VINF_SUCCESS;
3384}
3385
3386
3387/**
3388 * @opcode 0x82
3389 * @opmnemonic grp1_82
3390 * @opgroup og_groups
 *
 * Alias of opcode 0x80 (Group 1 Eb,Ib); the 0x82 encoding is invalid in
 * 64-bit mode, hence the IEMOP_HLP_NO_64BIT check before forwarding.
3391 */
3392FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
3393{
3394 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
3395 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
3396}
3397
3398
3399/**
3400 * @opcode 0x83
 *
 * Group 1 with a sign-extended byte immediate: add/or/adc/sbb/and/sub/xor/cmp
 * Ev,Ib.  The 8-bit immediate is sign-extended to the effective operand size
 * ((int8_t) casts below) before being handed to the same worker table as
 * opcode 0x81.
3401 */
3402FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
3403{
3404 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3405 switch (IEM_GET_MODRM_REG_8(bRm))
3406 {
3407 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
3408 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
3409 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
3410 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
3411 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
3412 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
3413 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
3414 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
3415 }
3416 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
3417 to the 386 even if absent in the intel reference manuals and some
3418 3rd party opcode listings. */
3419 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];
3420
3421 if (IEM_IS_MODRM_REG_MODE(bRm))
3422 {
3423 /*
3424 * Register target
3425 */
 /* NOTE(review): unlike 0x80/0x81, decoding is flagged done *before* the
 immediate byte is fetched here - verify this ordering is intentional. */
3426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3427 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3428 switch (pVCpu->iem.s.enmEffOpSize)
3429 {
3430 case IEMMODE_16BIT:
3431 {
3432 IEM_MC_BEGIN(3, 0);
3433 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3434 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
3435 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3436
3437 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3438 IEM_MC_REF_EFLAGS(pEFlags);
3439 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3440
3441 IEM_MC_ADVANCE_RIP();
3442 IEM_MC_END();
3443 break;
3444 }
3445
3446 case IEMMODE_32BIT:
3447 {
3448 IEM_MC_BEGIN(3, 0);
3449 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3450 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
3451 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3452
3453 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3454 IEM_MC_REF_EFLAGS(pEFlags);
3455 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
 /* 32-bit register results zero the high half of the 64-bit GPR. */
3456 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3457
3458 IEM_MC_ADVANCE_RIP();
3459 IEM_MC_END();
3460 break;
3461 }
3462
3463 case IEMMODE_64BIT:
3464 {
3465 IEM_MC_BEGIN(3, 0);
3466 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3467 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
3468 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3469
3470 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3471 IEM_MC_REF_EFLAGS(pEFlags);
3472 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3473
3474 IEM_MC_ADVANCE_RIP();
3475 IEM_MC_END();
3476 break;
3477 }
3478 }
3479 }
3480 else
3481 {
3482 /*
3483 * Memory target.
3484 */
 /* CMP (reg field 7) has no locked worker and only reads the target;
 the read/write vs. read-only mapping mode is chosen accordingly. */
3485 uint32_t fAccess;
3486 if (pImpl->pfnLockedU16)
3487 fAccess = IEM_ACCESS_DATA_RW;
3488 else /* CMP */
3489 fAccess = IEM_ACCESS_DATA_R;
3490
3491 switch (pVCpu->iem.s.enmEffOpSize)
3492 {
3493 case IEMMODE_16BIT:
3494 {
3495 IEM_MC_BEGIN(3, 2);
3496 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3497 IEM_MC_ARG(uint16_t, u16Src, 1);
3498 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3500
 /* One trailing immediate byte follows the ModR/M encoding. */
3501 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3502 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3503 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
3504 if (pImpl->pfnLockedU16)
3505 IEMOP_HLP_DONE_DECODING();
3506 else
3507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3508 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3509 IEM_MC_FETCH_EFLAGS(EFlags);
3510 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3511 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3512 else
3513 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3514
3515 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3516 IEM_MC_COMMIT_EFLAGS(EFlags);
3517 IEM_MC_ADVANCE_RIP();
3518 IEM_MC_END();
3519 break;
3520 }
3521
3522 case IEMMODE_32BIT:
3523 {
3524 IEM_MC_BEGIN(3, 2);
3525 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3526 IEM_MC_ARG(uint32_t, u32Src, 1);
3527 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3529
3530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3531 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3532 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
3533 if (pImpl->pfnLockedU32)
3534 IEMOP_HLP_DONE_DECODING();
3535 else
3536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3537 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3538 IEM_MC_FETCH_EFLAGS(EFlags);
3539 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3540 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3541 else
3542 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3543
3544 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3545 IEM_MC_COMMIT_EFLAGS(EFlags);
3546 IEM_MC_ADVANCE_RIP();
3547 IEM_MC_END();
3548 break;
3549 }
3550
3551 case IEMMODE_64BIT:
3552 {
3553 IEM_MC_BEGIN(3, 2);
3554 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3555 IEM_MC_ARG(uint64_t, u64Src, 1);
3556 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3557 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3558
3559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3560 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3561 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
3562 if (pImpl->pfnLockedU64)
3563 IEMOP_HLP_DONE_DECODING();
3564 else
3565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3566 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3567 IEM_MC_FETCH_EFLAGS(EFlags);
3568 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3569 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3570 else
3571 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3572
3573 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3574 IEM_MC_COMMIT_EFLAGS(EFlags);
3575 IEM_MC_ADVANCE_RIP();
3576 IEM_MC_END();
3577 break;
3578 }
3579 }
3580 }
3581 return VINF_SUCCESS;
3582}
3583
3584
3585/**
3586 * @opcode 0x84
 *
 * test Eb,Gb - delegates to the generic byte r/m,reg binary-operator
 * decoder with the TEST worker table.  AF is declared undefined for
 * verification purposes (architecturally undefined after TEST).
3587 */
3588FNIEMOP_DEF(iemOp_test_Eb_Gb)
3589{
3590 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
3591 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
3592 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
3593}
3594
3595
3596/**
3597 * @opcode 0x85
 *
 * test Ev,Gv - delegates to the generic word/dword/qword r/m,reg
 * binary-operator decoder with the TEST worker table.  AF is declared
 * undefined for verification purposes (architecturally undefined).
3598 */
3599FNIEMOP_DEF(iemOp_test_Ev_Gv)
3600{
3601 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
3602 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
3603 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
3604}
3605
3606
3607/**
3608 * @opcode 0x86
 *
 * xchg Eb,Gb.  The register-register form is a plain two-temp swap; the
 * memory form uses the locked worker by default (memory-operand XCHG is
 * implicitly atomic) unless fDisregardLock is set.
3609 */
3610FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
3611{
3612 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3613 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
3614
3615 /*
3616 * If rm is denoting a register, no more instruction bytes.
3617 */
3618 if (IEM_IS_MODRM_REG_MODE(bRm))
3619 {
3620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3621
3622 IEM_MC_BEGIN(0, 2);
3623 IEM_MC_LOCAL(uint8_t, uTmp1);
3624 IEM_MC_LOCAL(uint8_t, uTmp2);
3625
3626 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3627 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3628 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3629 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3630
3631 IEM_MC_ADVANCE_RIP();
3632 IEM_MC_END();
3633 }
3634 else
3635 {
3636 /*
3637 * We're accessing memory.
3638 */
3639/** @todo the register must be committed separately! */
3640 IEM_MC_BEGIN(2, 2);
3641 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
3642 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
3643 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3644
3645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3646 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3647 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
 /* XCHG with memory is implicitly locked; only skip the locked
 worker when the VM is configured to disregard locking. */
3648 if (!pVCpu->iem.s.fDisregardLock)
3649 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
3650 else
3651 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
3652 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
3653
3654 IEM_MC_ADVANCE_RIP();
3655 IEM_MC_END();
3656 }
3657 return VINF_SUCCESS;
3658}
3659
3660
3661/**
3662 * @opcode 0x87
 *
 * xchg Ev,Gv for 16/32/64-bit operand sizes.  Register-register swaps use
 * two temporaries; memory forms map the operand read/write and use the
 * locked worker by default (memory XCHG is implicitly atomic) unless
 * fDisregardLock is set.
3663 */
3664FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
3665{
3666 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
3667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3668
3669 /*
3670 * If rm is denoting a register, no more instruction bytes.
3671 */
3672 if (IEM_IS_MODRM_REG_MODE(bRm))
3673 {
3674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3675
3676 switch (pVCpu->iem.s.enmEffOpSize)
3677 {
3678 case IEMMODE_16BIT:
3679 IEM_MC_BEGIN(0, 2);
3680 IEM_MC_LOCAL(uint16_t, uTmp1);
3681 IEM_MC_LOCAL(uint16_t, uTmp2);
3682
3683 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3684 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3685 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3686 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3687
3688 IEM_MC_ADVANCE_RIP();
3689 IEM_MC_END();
3690 return VINF_SUCCESS;
3691
3692 case IEMMODE_32BIT:
3693 IEM_MC_BEGIN(0, 2);
3694 IEM_MC_LOCAL(uint32_t, uTmp1);
3695 IEM_MC_LOCAL(uint32_t, uTmp2);
3696
3697 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3698 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3699 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3700 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3701
3702 IEM_MC_ADVANCE_RIP();
3703 IEM_MC_END();
3704 return VINF_SUCCESS;
3705
3706 case IEMMODE_64BIT:
3707 IEM_MC_BEGIN(0, 2);
3708 IEM_MC_LOCAL(uint64_t, uTmp1);
3709 IEM_MC_LOCAL(uint64_t, uTmp2);
3710
3711 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3712 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3713 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3714 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3715
3716 IEM_MC_ADVANCE_RIP();
3717 IEM_MC_END();
3718 return VINF_SUCCESS;
3719
3720 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3721 }
3722 }
3723 else
3724 {
3725 /*
3726 * We're accessing memory.
3727 */
3728 switch (pVCpu->iem.s.enmEffOpSize)
3729 {
3730/** @todo the register must be committed separately! */
3731 case IEMMODE_16BIT:
3732 IEM_MC_BEGIN(2, 2);
3733 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
3734 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
3735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3736
3737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3738 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3739 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
3740 if (!pVCpu->iem.s.fDisregardLock)
3741 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
3742 else
3743 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
3744 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
3745
3746 IEM_MC_ADVANCE_RIP();
3747 IEM_MC_END();
3748 return VINF_SUCCESS;
3749
3750 case IEMMODE_32BIT:
3751 IEM_MC_BEGIN(2, 2);
3752 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
3753 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
3754 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3755
3756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3757 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3758 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
3759 if (!pVCpu->iem.s.fDisregardLock)
3760 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
3761 else
3762 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
3763 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
3764
 /* The register half was written via reference; zero the high half
 of the 64-bit GPR as required for 32-bit results. */
3765 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
3766 IEM_MC_ADVANCE_RIP();
3767 IEM_MC_END();
3768 return VINF_SUCCESS;
3769
3770 case IEMMODE_64BIT:
3771 IEM_MC_BEGIN(2, 2);
3772 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
3773 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
3774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3775
3776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3777 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3778 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
3779 if (!pVCpu->iem.s.fDisregardLock)
3780 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
3781 else
3782 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
3783 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
3784
3785 IEM_MC_ADVANCE_RIP();
3786 IEM_MC_END();
3787 return VINF_SUCCESS;
3788
3789 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3790 }
3791 }
3792}
3793
3794
3795/**
3796 * @opcode 0x88
 *
 * mov Eb,Gb - store a byte register to r/m (register or memory).
3797 */
3798FNIEMOP_DEF(iemOp_mov_Eb_Gb)
3799{
3800 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
3801
3802 uint8_t bRm;
3803 IEM_OPCODE_GET_NEXT_U8(&bRm);
3804
3805 /*
3806 * If rm is denoting a register, no more instruction bytes.
3807 */
3808 if (IEM_IS_MODRM_REG_MODE(bRm))
3809 {
3810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3811 IEM_MC_BEGIN(0, 1);
3812 IEM_MC_LOCAL(uint8_t, u8Value);
3813 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3814 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
3815 IEM_MC_ADVANCE_RIP();
3816 IEM_MC_END();
3817 }
3818 else
3819 {
3820 /*
3821 * We're writing a register to memory.
3822 */
3823 IEM_MC_BEGIN(0, 2);
3824 IEM_MC_LOCAL(uint8_t, u8Value);
3825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3828 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3829 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
3830 IEM_MC_ADVANCE_RIP();
3831 IEM_MC_END();
3832 }
3833 return VINF_SUCCESS;
3834
3835}
3836
3837
3838/**
3839 * @opcode 0x89
 *
 * mov Ev,Gv - store a 16/32/64-bit register to r/m (register or memory),
 * switching on the effective operand size.
3840 */
3841FNIEMOP_DEF(iemOp_mov_Ev_Gv)
3842{
3843 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
3844
3845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3846
3847 /*
3848 * If rm is denoting a register, no more instruction bytes.
3849 */
3850 if (IEM_IS_MODRM_REG_MODE(bRm))
3851 {
3852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3853 switch (pVCpu->iem.s.enmEffOpSize)
3854 {
3855 case IEMMODE_16BIT:
3856 IEM_MC_BEGIN(0, 1);
3857 IEM_MC_LOCAL(uint16_t, u16Value);
3858 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3859 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
3860 IEM_MC_ADVANCE_RIP();
3861 IEM_MC_END();
3862 break;
3863
3864 case IEMMODE_32BIT:
3865 IEM_MC_BEGIN(0, 1);
3866 IEM_MC_LOCAL(uint32_t, u32Value);
3867 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3868 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
3869 IEM_MC_ADVANCE_RIP();
3870 IEM_MC_END();
3871 break;
3872
3873 case IEMMODE_64BIT:
3874 IEM_MC_BEGIN(0, 1);
3875 IEM_MC_LOCAL(uint64_t, u64Value);
3876 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3877 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
3878 IEM_MC_ADVANCE_RIP();
3879 IEM_MC_END();
3880 break;
3881 }
3882 }
3883 else
3884 {
3885 /*
3886 * We're writing a register to memory.
3887 */
3888 switch (pVCpu->iem.s.enmEffOpSize)
3889 {
3890 case IEMMODE_16BIT:
3891 IEM_MC_BEGIN(0, 2);
3892 IEM_MC_LOCAL(uint16_t, u16Value);
3893 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3896 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3897 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
3898 IEM_MC_ADVANCE_RIP();
3899 IEM_MC_END();
3900 break;
3901
3902 case IEMMODE_32BIT:
3903 IEM_MC_BEGIN(0, 2);
3904 IEM_MC_LOCAL(uint32_t, u32Value);
3905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3908 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3909 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
3910 IEM_MC_ADVANCE_RIP();
3911 IEM_MC_END();
3912 break;
3913
3914 case IEMMODE_64BIT:
3915 IEM_MC_BEGIN(0, 2);
3916 IEM_MC_LOCAL(uint64_t, u64Value);
3917 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3918 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3920 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3921 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
3922 IEM_MC_ADVANCE_RIP();
3923 IEM_MC_END();
3924 break;
3925 }
3926 }
3927 return VINF_SUCCESS;
3928}
3929
3930
3931/**
3932 * @opcode 0x8a
 *
 * mov Gb,Eb - load a byte register from r/m (register or memory).
3933 */
3934FNIEMOP_DEF(iemOp_mov_Gb_Eb)
3935{
3936 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
3937
3938 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3939
3940 /*
3941 * If rm is denoting a register, no more instruction bytes.
3942 */
3943 if (IEM_IS_MODRM_REG_MODE(bRm))
3944 {
3945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3946 IEM_MC_BEGIN(0, 1);
3947 IEM_MC_LOCAL(uint8_t, u8Value);
3948 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
3949 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
3950 IEM_MC_ADVANCE_RIP();
3951 IEM_MC_END();
3952 }
3953 else
3954 {
3955 /*
3956 * We're loading a register from memory.
3957 */
3958 IEM_MC_BEGIN(0, 2);
3959 IEM_MC_LOCAL(uint8_t, u8Value);
3960 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3963 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3964 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
3965 IEM_MC_ADVANCE_RIP();
3966 IEM_MC_END();
3967 }
3968 return VINF_SUCCESS;
3969}
3970
3971
3972/**
3973 * @opcode 0x8b
3974 */
3975FNIEMOP_DEF(iemOp_mov_Gv_Ev)
3976{
3977 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
3978
3979 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3980
3981 /*
3982 * If rm is denoting a register, no more instruction bytes.
3983 */
3984 if (IEM_IS_MODRM_REG_MODE(bRm))
3985 {
3986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3987 switch (pVCpu->iem.s.enmEffOpSize)
3988 {
3989 case IEMMODE_16BIT:
3990 IEM_MC_BEGIN(0, 1);
3991 IEM_MC_LOCAL(uint16_t, u16Value);
3992 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
3993 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
3994 IEM_MC_ADVANCE_RIP();
3995 IEM_MC_END();
3996 break;
3997
3998 case IEMMODE_32BIT:
3999 IEM_MC_BEGIN(0, 1);
4000 IEM_MC_LOCAL(uint32_t, u32Value);
4001 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4002 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
4003 IEM_MC_ADVANCE_RIP();
4004 IEM_MC_END();
4005 break;
4006
4007 case IEMMODE_64BIT:
4008 IEM_MC_BEGIN(0, 1);
4009 IEM_MC_LOCAL(uint64_t, u64Value);
4010 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4011 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
4012 IEM_MC_ADVANCE_RIP();
4013 IEM_MC_END();
4014 break;
4015 }
4016 }
4017 else
4018 {
4019 /*
4020 * We're loading a register from memory.
4021 */
4022 switch (pVCpu->iem.s.enmEffOpSize)
4023 {
4024 case IEMMODE_16BIT:
4025 IEM_MC_BEGIN(0, 2);
4026 IEM_MC_LOCAL(uint16_t, u16Value);
4027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4030 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4031 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
4032 IEM_MC_ADVANCE_RIP();
4033 IEM_MC_END();
4034 break;
4035
4036 case IEMMODE_32BIT:
4037 IEM_MC_BEGIN(0, 2);
4038 IEM_MC_LOCAL(uint32_t, u32Value);
4039 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4042 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4043 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
4044 IEM_MC_ADVANCE_RIP();
4045 IEM_MC_END();
4046 break;
4047
4048 case IEMMODE_64BIT:
4049 IEM_MC_BEGIN(0, 2);
4050 IEM_MC_LOCAL(uint64_t, u64Value);
4051 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4054 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4055 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
4056 IEM_MC_ADVANCE_RIP();
4057 IEM_MC_END();
4058 break;
4059 }
4060 }
4061 return VINF_SUCCESS;
4062}
4063
4064
4065/**
4066 * opcode 0x63
 *
 * Mode-dependent dispatcher: outside 64-bit mode this byte is ARPL Ew,Gw;
 * in 64-bit mode it is MOVSXD Gv,Ev (falling back to a plain MOV when the
 * effective operand size is not 64-bit, so no sign-extension is needed).
4067 * @todo Table fixme
4068 */
4069FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
4070{
4071 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
4072 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
4073 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
4074 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
4075 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
4076}
4077
4078
4079/**
4080 * @opcode 0x8c
4081 */
4082FNIEMOP_DEF(iemOp_mov_Ev_Sw)
4083{
4084 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
4085
4086 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4087
4088 /*
4089 * Check that the destination register exists. The REX.R prefix is ignored.
4090 */
4091 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
4092 if ( iSegReg > X86_SREG_GS)
4093 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
4094
4095 /*
4096 * If rm is denoting a register, no more instruction bytes.
4097 * In that case, the operand size is respected and the upper bits are
4098 * cleared (starting with some pentium).
4099 */
4100 if (IEM_IS_MODRM_REG_MODE(bRm))
4101 {
4102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4103 switch (pVCpu->iem.s.enmEffOpSize)
4104 {
4105 case IEMMODE_16BIT:
4106 IEM_MC_BEGIN(0, 1);
4107 IEM_MC_LOCAL(uint16_t, u16Value);
4108 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
4109 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
4110 IEM_MC_ADVANCE_RIP();
4111 IEM_MC_END();
4112 break;
4113
4114 case IEMMODE_32BIT:
4115 IEM_MC_BEGIN(0, 1);
4116 IEM_MC_LOCAL(uint32_t, u32Value);
4117 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
4118 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
4119 IEM_MC_ADVANCE_RIP();
4120 IEM_MC_END();
4121 break;
4122
4123 case IEMMODE_64BIT:
4124 IEM_MC_BEGIN(0, 1);
4125 IEM_MC_LOCAL(uint64_t, u64Value);
4126 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
4127 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
4128 IEM_MC_ADVANCE_RIP();
4129 IEM_MC_END();
4130 break;
4131 }
4132 }
4133 else
4134 {
4135 /*
4136 * We're saving the register to memory. The access is word sized
4137 * regardless of operand size prefixes.
4138 */
4139#if 0 /* not necessary */
4140 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
4141#endif
4142 IEM_MC_BEGIN(0, 2);
4143 IEM_MC_LOCAL(uint16_t, u16Value);
4144 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4147 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
4148 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
4149 IEM_MC_ADVANCE_RIP();
4150 IEM_MC_END();
4151 }
4152 return VINF_SUCCESS;
4153}
4154
4155
4156
4157
4158/**
4159 * @opcode 0x8d
 *
 * lea Gv,M - store the effective address (no memory access) in the
 * destination register, truncated to the effective operand size.  The
 * register form of the ModR/M byte is invalid (\#UD).
4160 */
4161FNIEMOP_DEF(iemOp_lea_Gv_M)
4162{
4163 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
4164 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4165 if (IEM_IS_MODRM_REG_MODE(bRm))
4166 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
4167
4168 switch (pVCpu->iem.s.enmEffOpSize)
4169 {
4170 case IEMMODE_16BIT:
4171 IEM_MC_BEGIN(0, 2);
4172 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4173 IEM_MC_LOCAL(uint16_t, u16Cast);
4174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Truncate the effective address to the operand size. */
4176 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
4177 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
4178 IEM_MC_ADVANCE_RIP();
4179 IEM_MC_END();
4180 return VINF_SUCCESS;
4181
4182 case IEMMODE_32BIT:
4183 IEM_MC_BEGIN(0, 2);
4184 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4185 IEM_MC_LOCAL(uint32_t, u32Cast);
4186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4188 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
4189 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
4190 IEM_MC_ADVANCE_RIP();
4191 IEM_MC_END();
4192 return VINF_SUCCESS;
4193
4194 case IEMMODE_64BIT:
4195 IEM_MC_BEGIN(0, 1);
4196 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4197 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4199 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
4200 IEM_MC_ADVANCE_RIP();
4201 IEM_MC_END();
4202 return VINF_SUCCESS;
4203 }
 /* All operand sizes are handled above; reaching here is an internal error. */
4204 AssertFailedReturn(VERR_IEM_IPE_7);
4205}
4206
4207
4208/**
4209 * @opcode 0x8e
 *
 * mov Sw,Ev - load a segment register from a 16-bit register or memory
 * operand.  CS as destination (and any encoding above GS) raises \#UD.
 * The actual load goes through the iemCImpl_load_SReg C implementation,
 * which owns the descriptor checks and side effects.
4210 */
4211FNIEMOP_DEF(iemOp_mov_Sw_Ev)
4212{
4213 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
4214
4215 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4216
4217 /*
4218 * The practical operand size is 16-bit.
4219 */
4220#if 0 /* not necessary */
4221 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
4222#endif
4223
4224 /*
4225 * Check that the destination register exists and can be used with this
4226 * instruction. The REX.R prefix is ignored.
4227 */
4228 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
4229 if ( iSegReg == X86_SREG_CS
4230 || iSegReg > X86_SREG_GS)
4231 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
4232
4233 /*
4234 * If rm is denoting a register, no more instruction bytes.
4235 */
4236 if (IEM_IS_MODRM_REG_MODE(bRm))
4237 {
4238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4239 IEM_MC_BEGIN(2, 0);
4240 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
4241 IEM_MC_ARG(uint16_t, u16Value, 1);
4242 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4243 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
4244 IEM_MC_END();
4245 }
4246 else
4247 {
4248 /*
4249 * We're loading the register from memory. The access is word sized
4250 * regardless of operand size prefixes.
4251 */
4252 IEM_MC_BEGIN(2, 1);
4253 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
4254 IEM_MC_ARG(uint16_t, u16Value, 1);
4255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4258 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4259 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
4260 IEM_MC_END();
4261 }
4262 return VINF_SUCCESS;
4263}
4264
4265
/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP.  The last argument (2/4/8)
       matches the operand size - presumably the rSP bias applied when rSP
       takes part in the address; TODO confirm against the helper. */
/** @todo testcase */
    RTGCPTR     GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl.  Note that rSP is popped
       into a working copy (TmpRsp) first, so the guest RSP is only committed
       once the memory store has succeeded. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit the updated stack pointer and advance RIP only on success. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4359
4360
/**
 * @opcode 0x8f
 *
 * Dispatches between 'pop Ev' (modrm.reg == 0) and the AMD XOP prefix.
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* XOP must not be combined with 66/F2/F3/LOCK/REX prefixes. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* XOP.R/X/B are stored inverted in bits 7:5 of the second prefix
               byte; vvvv (inverted), L and pp come from the third byte. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f) /* mmmmm: opcode map select */
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
4423
4424
4425/**
4426 * Common 'xchg reg,rAX' helper.
4427 */
4428FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
4429{
4430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4431
4432 iReg |= pVCpu->iem.s.uRexB;
4433 switch (pVCpu->iem.s.enmEffOpSize)
4434 {
4435 case IEMMODE_16BIT:
4436 IEM_MC_BEGIN(0, 2);
4437 IEM_MC_LOCAL(uint16_t, u16Tmp1);
4438 IEM_MC_LOCAL(uint16_t, u16Tmp2);
4439 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
4440 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
4441 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
4442 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
4443 IEM_MC_ADVANCE_RIP();
4444 IEM_MC_END();
4445 return VINF_SUCCESS;
4446
4447 case IEMMODE_32BIT:
4448 IEM_MC_BEGIN(0, 2);
4449 IEM_MC_LOCAL(uint32_t, u32Tmp1);
4450 IEM_MC_LOCAL(uint32_t, u32Tmp2);
4451 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
4452 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
4453 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
4454 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
4455 IEM_MC_ADVANCE_RIP();
4456 IEM_MC_END();
4457 return VINF_SUCCESS;
4458
4459 case IEMMODE_64BIT:
4460 IEM_MC_BEGIN(0, 2);
4461 IEM_MC_LOCAL(uint64_t, u64Tmp1);
4462 IEM_MC_LOCAL(uint64_t, u64Tmp2);
4463 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
4464 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
4465 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
4466 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
4467 IEM_MC_ADVANCE_RIP();
4468 IEM_MC_END();
4469 return VINF_SUCCESS;
4470
4471 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4472 }
4473}
4474
4475
/**
 * @opcode 0x90
 *
 * Plain NOP, 'xchg r8,rAX' when a REX.B prefix is present, and PAUSE with an
 * F3 (rep) prefix.
 */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    /* NOTE(review): the lock-prefix bit is tested here; presumably
       IEM_OP_PRF_LOCK is also set for the F3 (pause) prefix case handled by
       the decoder - confirm against the prefix decoding code. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        IEMOP_MNEMONIC(pause, "pause");
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
        /* PAUSE may need intercept handling when nested VMX is active. */
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVmx)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmx_pause);
#endif
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
        /* Ditto for nested SVM. */
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSvm)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_svm_pause);
#endif
    }
    else
        IEMOP_MNEMONIC(nop, "nop");
    /* Plain nop / non-intercepted pause: just advance RIP. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4507
4508
/**
 * @opcode 0x91
 *
 * 'xchg rCX,rAX' - shares the common xchg-with-rAX worker.
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
4517
4518
/**
 * @opcode 0x92
 *
 * 'xchg rDX,rAX' - shares the common xchg-with-rAX worker.
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
4527
4528
/**
 * @opcode 0x93
 *
 * 'xchg rBX,rAX' - shares the common xchg-with-rAX worker.
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
4537
4538
4539/**
4540 * @opcode 0x94
4541 */
4542FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4543{
4544 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4545 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4546}
4547
4548
/**
 * @opcode 0x95
 *
 * 'xchg rBP,rAX' - shares the common xchg-with-rAX worker.
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
4557
4558
/**
 * @opcode 0x96
 *
 * 'xchg rSI,rAX' - shares the common xchg-with-rAX worker.
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
4567
4568
/**
 * @opcode 0x97
 *
 * 'xchg rDI,rAX' - shares the common xchg-with-rAX worker.
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
4577
4578
/**
 * @opcode 0x98
 *
 * cbw/cwde/cdqe - sign-extends AL->AX, AX->EAX or EAX->RAX depending on the
 * effective operand size.  Implemented by testing the sign bit of the source
 * and then OR-ing in or AND-ing away the upper half of rAX.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {     /* AL sign bit */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {    /* AX sign bit */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {    /* EAX sign bit */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4626
4627
/**
 * @opcode 0x99
 *
 * cwd/cdq/cqo - sign-extends rAX into rDX:rAX by filling rDX with all ones or
 * all zeros according to the sign bit of the source.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {    /* AX sign bit */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {    /* EAX sign bit */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {    /* RAX sign bit */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4675
4676
/**
 * @opcode 0x9a
 *
 * Direct far call with an immediate seg:offset pointer.  Not valid in 64-bit
 * mode (IEMOP_HLP_NO_64BIT).
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);        /* 32-bit offset */
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg); /* 16-bit offset, zero extended */
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
4695
4696
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending FPU exceptions / device-not-available conditions and
 * otherwise does nothing. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4710
4711
/**
 * @opcode 0x9c
 *
 * pushf - deferred entirely to the C implementation, which handles the
 * mode/IOPL specific details.  Default 64-bit operand size in long mode.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
4722
4723
/**
 * @opcode 0x9d
 *
 * popf - deferred entirely to the C implementation, which handles the
 * mode/IOPL specific details.  Default 64-bit operand size in long mode.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
4734
4735
/**
 * @opcode 0x9e
 *
 * sahf - stores AH into the SF/ZF/AF/PF/CF bits of EFLAGS.  In 64-bit mode the
 * instruction is only valid when the LAHF/SAHF CPUID feature is present.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* Register index 4 encodes AH here (no REX prefix in this path). */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the arithmetic flags from AH... */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    /* ...clear the low byte of the current EFLAGS... */
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    /* ...force the always-one bit 1 and merge. */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4760
4761
/**
 * @opcode 0x9f
 *
 * lahf - loads the low byte of EFLAGS into AH.  In 64-bit mode the instruction
 * is only valid when the LAHF/SAHF CPUID feature is present.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* Register index 4 encodes AH here (no REX prefix in this path). */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4780
4781
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
 * prefixes.  Will return on failures.
 *
 * The moffs width follows the effective ADDRESS size (not operand size); the
 * fetched value is zero extended to 64 bits.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: /* moffs16 */ \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: /* moffs32 */ \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: /* moffs64 */ \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
4806
4807/**
4808 * @opcode 0xa0
4809 */
4810FNIEMOP_DEF(iemOp_mov_AL_Ob)
4811{
4812 /*
4813 * Get the offset and fend off lock prefixes.
4814 */
4815 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
4816 RTGCPTR GCPtrMemOff;
4817 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4818
4819 /*
4820 * Fetch AL.
4821 */
4822 IEM_MC_BEGIN(0,1);
4823 IEM_MC_LOCAL(uint8_t, u8Tmp);
4824 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4825 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
4826 IEM_MC_ADVANCE_RIP();
4827 IEM_MC_END();
4828 return VINF_SUCCESS;
4829}
4830
4831
/**
 * @opcode 0xa1
 *
 * Loads rAX (at the effective operand size) from the immediate memory offset
 * (moffs).
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4879
4880
4881/**
4882 * @opcode 0xa2
4883 */
4884FNIEMOP_DEF(iemOp_mov_Ob_AL)
4885{
4886 /*
4887 * Get the offset and fend off lock prefixes.
4888 */
4889 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
4890 RTGCPTR GCPtrMemOff;
4891 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4892
4893 /*
4894 * Store AL.
4895 */
4896 IEM_MC_BEGIN(0,1);
4897 IEM_MC_LOCAL(uint8_t, u8Tmp);
4898 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4899 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4900 IEM_MC_ADVANCE_RIP();
4901 IEM_MC_END();
4902 return VINF_SUCCESS;
4903}
4904
4905
/**
 * @opcode 0xa3
 *
 * Stores rAX (at the effective operand size) to the immediate memory offset
 * (moffs).
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4953
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * One non-repeated movs iteration: load from DS(or override):rSI, store to
 * ES:rDI, then advance rSI/rDI by the element size - down when EFLAGS.DF is
 * set, up otherwise.  Addresses are zero extended from the effective address
 * width to 64 bits. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { /* DF=1: walk downwards */ \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { /* DF=0: walk upwards */ \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
4972
/**
 * @opcode 0xa4
 *
 * movsb - byte string move.  Rep-prefixed forms go to per-address-size C
 * implementations; the single-shot form uses IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (F2 and F3 are treated the same for movs.)
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5008
5009
/**
 * @opcode 0xa5
 *
 * movsw/movsd/movsq - word/dword/qword string move.  Rep-prefixed forms go to
 * per-opsize/addrsize C implementations; the single-shot forms use
 * IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* No break after this inner switch: unreachable, every case returns. */
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    /* 64-bit operand size with 16-bit addressing cannot be encoded. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5094
5095#undef IEM_MOVS_CASE
5096
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * One non-repeated cmps iteration: load [rSI] and ES:[rDI], run the cmp
 * assembly worker (updates EFLAGS only - the first operand is a local, not a
 * guest register), then advance rSI/rDI by the element size according to
 * EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { /* DF=1: walk downwards */ \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { /* DF=0: walk upwards */ \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \

/**
 * @opcode 0xa6
 *
 * cmpsb - byte string compare.  Unlike movs, repe (F3) and repne (F2) have
 * distinct C implementations; the single-shot form uses IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
5171
5172
/**
 * @opcode 0xa7
 *
 * cmpsw/cmpsd/cmpsq - word/dword/qword string compare.  repe (F3) and repne
 * (F2) go to distinct per-opsize/addrsize C implementations; the single-shot
 * forms use IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* No break after this inner switch: unreachable, every case returns. */
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    /* 64-bit operand size with 16-bit addressing cannot be encoded. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* No break after this inner switch: unreachable, every case returns. */
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    /* 64-bit operand size with 16-bit addressing cannot be encoded. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
5293
5294#undef IEM_CMPS_CASE
5295
5296/**
 * @opcode 0xa8
 *
 * TEST AL,Ib - AND AL with the immediate byte, set flags, discard the result.
 * Decodes via the common AL,Ib binary-operator helper using the 'test'
 * implementation table; AF is declared undefined for the verifier.
 */
5299FNIEMOP_DEF(iemOp_test_AL_Ib)
5300{
5301 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
5302 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5303 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
5304}
5305
5306
5307/**
 * @opcode 0xa9
 *
 * TEST rAX,Iz - AND AX/EAX/RAX with the immediate, set flags, discard the
 * result.  Decodes via the common rAX,Iz binary-operator helper using the
 * 'test' implementation table; AF is declared undefined for the verifier.
 */
5310FNIEMOP_DEF(iemOp_test_eAX_Iz)
5311{
5312 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
5313 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5314 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
5315}
5316
5317
5318/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the MC block for one non-repeated STOS variant: fetch rAX (ValBits
 * wide), fetch rDI zero-extended to 64 bits (AddrBits wide), store the value
 * at ES:rDI, then step rDI down (DF=1) or up (DF=0) by ValBits/8 bytes.
 */
5319#define IEM_STOS_CASE(ValBits, AddrBits) \
5320 IEM_MC_BEGIN(0, 2); \
5321 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
5322 IEM_MC_LOCAL(RTGCPTR, uAddr); \
5323 IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
5324 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
5325 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
5326 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
5327 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
5328 } IEM_MC_ELSE() { \
5329 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
5330 } IEM_MC_ENDIF(); \
5331 IEM_MC_ADVANCE_RIP(); \
5332 IEM_MC_END(); \
5333
5334/**
 * @opcode 0xaa
 *
 * STOSB - store AL at ES:[e/rDI] and advance rDI according to EFLAGS.DF.
 * With a REP/REPNE prefix the whole string operation is deferred to the
 * C implementation (iemCImpl_stos_al_mXX, selected by address mode).
 */
5337FNIEMOP_DEF(iemOp_stosb_Yb_AL)
5338{
5339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5340
5341 /*
 * Use the C implementation if a repeat prefix is encountered.
 */
5344 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5345 {
5346 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
5347 switch (pVCpu->iem.s.enmEffAddrMode)
5348 {
5349 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
5350 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
5351 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
5352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5353 }
5354 }
5355 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
5356
5357 /*
 * Sharing case implementation with stos[wdq] below.
 */
5360 switch (pVCpu->iem.s.enmEffAddrMode)
5361 {
5362 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
5363 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
5364 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
5365 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5366 }
5367 return VINF_SUCCESS;
5368}
5369
5370
5371/**
 * @opcode 0xab
 *
 * STOSW/STOSD/STOSQ - store AX/EAX/RAX at ES:[e/rDI] and advance rDI per
 * EFLAGS.DF.  With a REP/REPNE prefix the operation is deferred to the C
 * implementation selected by operand and address size.
 */
5374FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
5375{
5376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5377
5378 /*
 * Use the C implementation if a repeat prefix is encountered.
 */
5381 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5382 {
5383 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
5384 switch (pVCpu->iem.s.enmEffOpSize)
5385 {
5386 case IEMMODE_16BIT:
5387 switch (pVCpu->iem.s.enmEffAddrMode)
5388 {
5389 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
5390 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
5391 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
5392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5393 }
5394 break;
5395 case IEMMODE_32BIT:
5396 switch (pVCpu->iem.s.enmEffAddrMode)
5397 {
5398 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
5399 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
5400 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
5401 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5402 }
                /* Note: no 'break' here, but every case above returns, so
                   falling into IEMMODE_64BIT is unreachable. */
5403 case IEMMODE_64BIT:
5404 switch (pVCpu->iem.s.enmEffAddrMode)
5405 {
5406 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
5407 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
5408 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
5409 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5410 }
5411 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5412 }
5413 }
5414 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
5415
5416 /*
 * Annoying double switch here.
 * Using ugly macro for implementing the cases, sharing it with stosb.
 */
5420 switch (pVCpu->iem.s.enmEffOpSize)
5421 {
5422 case IEMMODE_16BIT:
5423 switch (pVCpu->iem.s.enmEffAddrMode)
5424 {
5425 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
5426 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
5427 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
5428 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5429 }
5430 break;
5431
5432 case IEMMODE_32BIT:
5433 switch (pVCpu->iem.s.enmEffAddrMode)
5434 {
5435 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
5436 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
5437 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
5438 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5439 }
5440 break;
5441
5442 case IEMMODE_64BIT:
5443 switch (pVCpu->iem.s.enmEffAddrMode)
5444 {
5445 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5446 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
5447 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
5448 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5449 }
5450 break;
5451 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5452 }
5453 return VINF_SUCCESS;
5454}
5455
5456#undef IEM_STOS_CASE
5457
5458/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the MC block for one non-repeated LODS variant: fetch rSI
 * zero-extended to 64 bits (AddrBits wide), load ValBits from the effective
 * segment at rSI into rAX, then step rSI down (DF=1) or up (DF=0) by
 * ValBits/8 bytes.
 */
5459#define IEM_LODS_CASE(ValBits, AddrBits) \
5460 IEM_MC_BEGIN(0, 2); \
5461 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
5462 IEM_MC_LOCAL(RTGCPTR, uAddr); \
5463 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
5464 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
5465 IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
5466 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
5467 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
5468 } IEM_MC_ELSE() { \
5469 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
5470 } IEM_MC_ENDIF(); \
5471 IEM_MC_ADVANCE_RIP(); \
5472 IEM_MC_END();
5473
5474/**
 * @opcode 0xac
 *
 * LODSB - load AL from DS(iEffSeg):[e/rSI] and advance rSI per EFLAGS.DF.
 * With a REP/REPNE prefix the operation is deferred to the C implementation
 * (iemCImpl_lods_al_mXX, selected by address mode).
 */
5477FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
5478{
5479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5480
5481 /*
 * Use the C implementation if a repeat prefix is encountered.
 */
5484 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5485 {
5486 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
5487 switch (pVCpu->iem.s.enmEffAddrMode)
5488 {
5489 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
5490 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
5491 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
5492 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5493 }
5494 }
5495 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
5496
5497 /*
 * Sharing case implementation with lods[wdq] below.
 */
5500 switch (pVCpu->iem.s.enmEffAddrMode)
5501 {
5502 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
5503 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
5504 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
5505 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5506 }
5507 return VINF_SUCCESS;
5508}
5509
5510
5511/**
 * @opcode 0xad
 *
 * LODSW/LODSD/LODSQ - load AX/EAX/RAX from DS(iEffSeg):[e/rSI] and advance
 * rSI per EFLAGS.DF.  With a REP/REPNE prefix the operation is deferred to
 * the C implementation selected by operand and address size.
 */
5514FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
5515{
5516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5517
5518 /*
 * Use the C implementation if a repeat prefix is encountered.
 */
5521 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5522 {
5523 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
5524 switch (pVCpu->iem.s.enmEffOpSize)
5525 {
5526 case IEMMODE_16BIT:
5527 switch (pVCpu->iem.s.enmEffAddrMode)
5528 {
5529 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
5530 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
5531 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
5532 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5533 }
5534 break;
5535 case IEMMODE_32BIT:
5536 switch (pVCpu->iem.s.enmEffAddrMode)
5537 {
5538 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
5539 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
5540 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
5541 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5542 }
                /* Note: no 'break' here, but every case above returns, so
                   falling into IEMMODE_64BIT is unreachable. */
5543 case IEMMODE_64BIT:
5544 switch (pVCpu->iem.s.enmEffAddrMode)
5545 {
5546 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
5547 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
5548 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
5549 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5550 }
5551 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5552 }
5553 }
5554 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
5555
5556 /*
 * Annoying double switch here.
 * Using ugly macro for implementing the cases, sharing it with lodsb.
 */
5560 switch (pVCpu->iem.s.enmEffOpSize)
5561 {
5562 case IEMMODE_16BIT:
5563 switch (pVCpu->iem.s.enmEffAddrMode)
5564 {
5565 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
5566 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
5567 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
5568 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5569 }
5570 break;
5571
5572 case IEMMODE_32BIT:
5573 switch (pVCpu->iem.s.enmEffAddrMode)
5574 {
5575 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
5576 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
5577 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
5578 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5579 }
5580 break;
5581
5582 case IEMMODE_64BIT:
5583 switch (pVCpu->iem.s.enmEffAddrMode)
5584 {
5585 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5586 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
5587 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
5588 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5589 }
5590 break;
5591 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5592 }
5593 return VINF_SUCCESS;
5594}
5595
5596#undef IEM_LODS_CASE
5597
5598/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the MC block for one non-repeated SCAS variant: fetch rDI
 * zero-extended to 64 bits (AddrBits wide), load ValBits from ES:rDI,
 * compare it against rAX via iemAImpl_cmp_uNN (updating EFLAGS, rAX is
 * untouched), then step rDI down (DF=1) or up (DF=0) by ValBits/8 bytes.
 */
5599#define IEM_SCAS_CASE(ValBits, AddrBits) \
5600 IEM_MC_BEGIN(3, 2); \
5601 IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
5602 IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
5603 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
5604 IEM_MC_LOCAL(RTGCPTR, uAddr); \
5605 \
5606 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
5607 IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
5608 IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
5609 IEM_MC_REF_EFLAGS(pEFlags); \
5610 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
5611 \
5612 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
5613 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
5614 } IEM_MC_ELSE() { \
5615 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
5616 } IEM_MC_ENDIF(); \
5617 IEM_MC_ADVANCE_RIP(); \
5618 IEM_MC_END();
5619
5620/**
5621 * @opcode 0xae
5622 */
5623FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5624{
5625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5626
5627 /*
5628 * Use the C implementation if a repeat prefix is encountered.
5629 */
5630 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5631 {
5632 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5633 switch (pVCpu->iem.s.enmEffAddrMode)
5634 {
5635 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5636 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5637 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5638 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5639 }
5640 }
5641 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5642 {
5643 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5644 switch (pVCpu->iem.s.enmEffAddrMode)
5645 {
5646 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5647 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5648 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5649 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5650 }
5651 }
5652 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5653
5654 /*
5655 * Sharing case implementation with stos[wdq] below.
5656 */
5657 switch (pVCpu->iem.s.enmEffAddrMode)
5658 {
5659 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5660 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5661 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5663 }
5664 return VINF_SUCCESS;
5665}
5666
5667
5668/**
 * @opcode 0xaf
 *
 * SCASW/SCASD/SCASQ - compare AX/EAX/RAX against ES:[e/rDI], update EFLAGS,
 * and advance rDI per EFLAGS.DF.  With a REPE or REPNE prefix the operation
 * is deferred to the C implementation selected by operand and address size;
 * REPE is checked first, so it wins if both prefix bits are set.
 */
5671FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5672{
5673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5674
5675 /*
 * Use the C implementation if a repeat prefix is encountered.
 */
5678 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5679 {
5680 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5681 switch (pVCpu->iem.s.enmEffOpSize)
5682 {
5683 case IEMMODE_16BIT:
5684 switch (pVCpu->iem.s.enmEffAddrMode)
5685 {
5686 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5687 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5688 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5689 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5690 }
5691 break;
5692 case IEMMODE_32BIT:
5693 switch (pVCpu->iem.s.enmEffAddrMode)
5694 {
5695 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5696 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5697 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5698 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5699 }
5700 case IEMMODE_64BIT:
5701 switch (pVCpu->iem.s.enmEffAddrMode)
5702 {
5703 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** NOTE(review): old @todo wording was garbled.  16-bit addressing cannot be encoded in 64-bit mode (the 67h prefix selects 32-bit addressing), so this path should be unreachable - confirm. */
5704 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5705 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5706 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5707 }
5708 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5709 }
5710 }
5711 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5712 {
5713 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5714 switch (pVCpu->iem.s.enmEffOpSize)
5715 {
5716 case IEMMODE_16BIT:
5717 switch (pVCpu->iem.s.enmEffAddrMode)
5718 {
5719 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5720 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5721 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5722 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5723 }
5724 break;
5725 case IEMMODE_32BIT:
5726 switch (pVCpu->iem.s.enmEffAddrMode)
5727 {
5728 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5729 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5730 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5731 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5732 }
5733 case IEMMODE_64BIT:
5734 switch (pVCpu->iem.s.enmEffAddrMode)
5735 {
5736 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5737 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5738 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5739 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5740 }
5741 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5742 }
5743 }
5744 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5745
5746 /*
 * Annoying double switch here.
 * Using ugly macro for implementing the cases, sharing it with scasb.
 */
5750 switch (pVCpu->iem.s.enmEffOpSize)
5751 {
5752 case IEMMODE_16BIT:
5753 switch (pVCpu->iem.s.enmEffAddrMode)
5754 {
5755 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5756 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5757 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5758 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5759 }
5760 break;
5761
5762 case IEMMODE_32BIT:
5763 switch (pVCpu->iem.s.enmEffAddrMode)
5764 {
5765 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5766 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5767 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5768 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5769 }
5770 break;
5771
5772 case IEMMODE_64BIT:
5773 switch (pVCpu->iem.s.enmEffAddrMode)
5774 {
5775 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5776 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5777 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5778 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5779 }
5780 break;
5781 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5782 }
5783 return VINF_SUCCESS;
5784}
5785
5786#undef IEM_SCAS_CASE
5787
5788/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the Ib immediate and stores it into byte register @a iReg.
 * Callers pass an index already combined with uRexB; the high-byte vs.
 * REX low-byte distinction is presumably resolved by IEM_MC_STORE_GREG_U8
 * based on the prefix state - TODO confirm.
 *
 * @param   iReg    The byte register index to store the immediate into.
 */
5791FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
5792{
5793 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
5794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5795
5796 IEM_MC_BEGIN(0, 1);
5797 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
5798 IEM_MC_STORE_GREG_U8(iReg, u8Value);
5799 IEM_MC_ADVANCE_RIP();
5800 IEM_MC_END();
5801
5802 return VINF_SUCCESS;
5803}
5804
5805
5806/**
 * @opcode 0xb0
 *
 * MOV AL,Ib (and R8B with REX.B) - thin wrapper over iemOpCommonMov_r8_Ib.
 */
5809FNIEMOP_DEF(iemOp_mov_AL_Ib)
5810{
5811 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
5812 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5813}
5814
5815
5816/**
 * @opcode 0xb1
 */
5819FNIEMOP_DEF(iemOp_CL_Ib)
5820{
5821 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
5822 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5823}
5824
5825
5826/**
 * @opcode 0xb2
 */
5829FNIEMOP_DEF(iemOp_DL_Ib)
5830{
5831 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
5832 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5833}
5834
5835
5836/**
 * @opcode 0xb3
 */
5839FNIEMOP_DEF(iemOp_BL_Ib)
5840{
5841 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
5842 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5843}
5844
5845
5846/**
 * @opcode 0xb4
 *
 * Note: opcodes 0xb4..0xb7 pass the xSP/xBP/xSI/xDI indices (4..7); without
 * a REX prefix these encodings select AH/CH/DH/BH, with REX they select
 * SPL/BPL/SIL/DIL - presumably resolved inside IEM_MC_STORE_GREG_U8, TODO
 * confirm.
 */
5849FNIEMOP_DEF(iemOp_mov_AH_Ib)
5850{
5851 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
5852 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5853}
5854
5855
5856/**
 * @opcode 0xb5
 */
5859FNIEMOP_DEF(iemOp_CH_Ib)
5860{
5861 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
5862 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5863}
5864
5865
5866/**
 * @opcode 0xb6
 */
5869FNIEMOP_DEF(iemOp_DH_Ib)
5870{
5871 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
5872 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5873}
5874
5875
5876/**
 * @opcode 0xb7
 */
5879FNIEMOP_DEF(iemOp_BH_Ib)
5880{
5881 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
5882 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5883}
5884
5885
5886/**
5887 * Common 'mov regX,immX' helper.
5888 */
5889FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5890{
5891 switch (pVCpu->iem.s.enmEffOpSize)
5892 {
5893 case IEMMODE_16BIT:
5894 {
5895 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5897
5898 IEM_MC_BEGIN(0, 1);
5899 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5900 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5901 IEM_MC_ADVANCE_RIP();
5902 IEM_MC_END();
5903 break;
5904 }
5905
5906 case IEMMODE_32BIT:
5907 {
5908 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5910
5911 IEM_MC_BEGIN(0, 1);
5912 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5913 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5914 IEM_MC_ADVANCE_RIP();
5915 IEM_MC_END();
5916 break;
5917 }
5918 case IEMMODE_64BIT:
5919 {
5920 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5922
5923 IEM_MC_BEGIN(0, 1);
5924 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5925 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5926 IEM_MC_ADVANCE_RIP();
5927 IEM_MC_END();
5928 break;
5929 }
5930 }
5931
5932 return VINF_SUCCESS;
5933}
5934
5935
5936/**
 * @opcode 0xb8
 *
 * MOV rAX,Iv - thin wrapper over iemOpCommonMov_Rv_Iv; with REX.W this
 * takes a full 64-bit immediate (see the U64 fetch in the common helper).
 * Opcodes 0xb8..0xbf all follow this pattern for rAX..rDI (REX.B extends
 * to R8..R15).
 */
5939FNIEMOP_DEF(iemOp_eAX_Iv)
5940{
5941 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
5942 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5943}
5944
5945
5946/**
 * @opcode 0xb9
 */
5949FNIEMOP_DEF(iemOp_eCX_Iv)
5950{
5951 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
5952 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5953}
5954
5955
5956/**
 * @opcode 0xba
 */
5959FNIEMOP_DEF(iemOp_eDX_Iv)
5960{
5961 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
5962 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5963}
5964
5965
5966/**
 * @opcode 0xbb
 */
5969FNIEMOP_DEF(iemOp_eBX_Iv)
5970{
5971 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
5972 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5973}
5974
5975
5976/**
 * @opcode 0xbc
 */
5979FNIEMOP_DEF(iemOp_eSP_Iv)
5980{
5981 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
5982 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5983}
5984
5985
5986/**
 * @opcode 0xbd
 */
5989FNIEMOP_DEF(iemOp_eBP_Iv)
5990{
5991 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5992 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5993}
5994
5995
5996/**
 * @opcode 0xbe
 */
5999FNIEMOP_DEF(iemOp_eSI_Iv)
6000{
6001 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
6002 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
6003}
6004
6005
6006/**
 * @opcode 0xbf
 */
6009FNIEMOP_DEF(iemOp_eDI_Iv)
6010{
6011 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
6012 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
6013}
6014
6015
6016/**
 * @opcode 0xc0
 *
 * Group 2 Eb,Ib - byte rotate/shift by immediate (rol/ror/rcl/rcr/shl/shr/
 * sar selected by ModRM.reg; /6 is an invalid opcode).  Requires a 186+.
 * The implementation table is picked per target-CPU EFLAGS behaviour; OF
 * and AF are declared undefined for the verifier.  Register operands go
 * straight through a GREG reference; memory operands use the RW map /
 * commit sequence with the immediate fetched after effective-address
 * calculation (decode order matters here).
 */
6019FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
6020{
6021 IEMOP_HLP_MIN_186();
6022 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6023 PCIEMOPSHIFTSIZES pImpl;
6024 switch (IEM_GET_MODRM_REG_8(bRm))
6025 {
6026 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
6027 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
6028 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
6029 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
6030 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
6031 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
6032 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
6033 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6034 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6035 }
6036 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6037
6038 if (IEM_IS_MODRM_REG_MODE(bRm))
6039 {
6040 /* register */
6041 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6043 IEM_MC_BEGIN(3, 0);
6044 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6045 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6046 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6047 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6048 IEM_MC_REF_EFLAGS(pEFlags);
6049 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6050 IEM_MC_ADVANCE_RIP();
6051 IEM_MC_END();
6052 }
6053 else
6054 {
6055 /* memory */
6056 IEM_MC_BEGIN(3, 2);
6057 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6058 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6059 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6061
6062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6063 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6064 IEM_MC_ASSIGN(cShiftArg, cShift);
6065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6066 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6067 IEM_MC_FETCH_EFLAGS(EFlags);
6068 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6069
6070 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6071 IEM_MC_COMMIT_EFLAGS(EFlags);
6072 IEM_MC_ADVANCE_RIP();
6073 IEM_MC_END();
6074 }
6075 return VINF_SUCCESS;
6076}
6077
6078
6079/**
 * @opcode 0xc1
 *
 * Group 2 Ev,Ib - word/dword/qword rotate/shift by immediate (rol/ror/rcl/
 * rcr/shl/shr/sar selected by ModRM.reg; /6 is an invalid opcode).
 * Requires a 186+.  Same structure as the Eb,Ib form above but fanned out
 * over the three effective operand sizes; OF and AF are declared undefined
 * for the verifier.  For memory operands the immediate is fetched after
 * effective-address calculation (decode order matters).
 */
6082FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
6083{
6084 IEMOP_HLP_MIN_186();
6085 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6086 PCIEMOPSHIFTSIZES pImpl;
6087 switch (IEM_GET_MODRM_REG_8(bRm))
6088 {
6089 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
6090 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
6091 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
6092 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
6093 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
6094 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
6095 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
6096 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6097 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6098 }
6099 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6100
6101 if (IEM_IS_MODRM_REG_MODE(bRm))
6102 {
6103 /* register */
6104 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6106 switch (pVCpu->iem.s.enmEffOpSize)
6107 {
6108 case IEMMODE_16BIT:
6109 IEM_MC_BEGIN(3, 0);
6110 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6111 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6112 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6113 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6114 IEM_MC_REF_EFLAGS(pEFlags);
6115 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6116 IEM_MC_ADVANCE_RIP();
6117 IEM_MC_END();
6118 return VINF_SUCCESS;
6119
6120 case IEMMODE_32BIT:
6121 IEM_MC_BEGIN(3, 0);
6122 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6123 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6124 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6125 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6126 IEM_MC_REF_EFLAGS(pEFlags);
6127 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6128 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6129 IEM_MC_ADVANCE_RIP();
6130 IEM_MC_END();
6131 return VINF_SUCCESS;
6132
6133 case IEMMODE_64BIT:
6134 IEM_MC_BEGIN(3, 0);
6135 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6136 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6137 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6138 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6139 IEM_MC_REF_EFLAGS(pEFlags);
6140 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6141 IEM_MC_ADVANCE_RIP();
6142 IEM_MC_END();
6143 return VINF_SUCCESS;
6144
6145 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6146 }
6147 }
6148 else
6149 {
6150 /* memory */
6151 switch (pVCpu->iem.s.enmEffOpSize)
6152 {
6153 case IEMMODE_16BIT:
6154 IEM_MC_BEGIN(3, 2);
6155 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6156 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6157 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6159
6160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6161 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6162 IEM_MC_ASSIGN(cShiftArg, cShift);
6163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6164 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6165 IEM_MC_FETCH_EFLAGS(EFlags);
6166 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6167
6168 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6169 IEM_MC_COMMIT_EFLAGS(EFlags);
6170 IEM_MC_ADVANCE_RIP();
6171 IEM_MC_END();
6172 return VINF_SUCCESS;
6173
6174 case IEMMODE_32BIT:
6175 IEM_MC_BEGIN(3, 2);
6176 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6177 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6178 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6180
6181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6182 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6183 IEM_MC_ASSIGN(cShiftArg, cShift);
6184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6185 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6186 IEM_MC_FETCH_EFLAGS(EFlags);
6187 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6188
6189 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6190 IEM_MC_COMMIT_EFLAGS(EFlags);
6191 IEM_MC_ADVANCE_RIP();
6192 IEM_MC_END();
6193 return VINF_SUCCESS;
6194
6195 case IEMMODE_64BIT:
6196 IEM_MC_BEGIN(3, 2);
6197 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6198 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6199 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6201
6202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6203 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6204 IEM_MC_ASSIGN(cShiftArg, cShift);
6205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6206 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6207 IEM_MC_FETCH_EFLAGS(EFlags);
6208 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6209
6210 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6211 IEM_MC_COMMIT_EFLAGS(EFlags);
6212 IEM_MC_ADVANCE_RIP();
6213 IEM_MC_END();
6214 return VINF_SUCCESS;
6215
6216 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6217 }
6218 }
6219}
6220
6221
6222/**
 * @opcode 0xc2
 *
 * RET Iw - near return, additionally popping Iw bytes of arguments off the
 * stack.  Defaults to 64-bit operand size in long mode; the actual pop and
 * RIP update are done by iemCImpl_retn.
 */
6225FNIEMOP_DEF(iemOp_retn_Iw)
6226{
6227 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
6228 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6230 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6231 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
6232}
6233
6234
6235/**
 * @opcode 0xc3
 *
 * RET - near return with no argument pop (same C implementation as 0xc2,
 * invoked with a zero byte count).  Defaults to 64-bit operand size in
 * long mode.
 */
6238FNIEMOP_DEF(iemOp_retn)
6239{
6240 IEMOP_MNEMONIC(retn, "retn");
6241 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6243 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
6244}
6245
6246
/**
 * @opcode 0xc4
 *
 * Dual-purpose opcode: LES Gv,Mp or the 3-byte VEX prefix.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode.  VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* VEX.W is only honoured in 64-bit mode. */
            if ((bVex2 & 0x80 /* VEX.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The R/X/B bits are stored inverted in the prefix bytes. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* The low five bits of the first payload byte select the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        Log(("VEX3: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Not a VEX prefix: decode as the legacy far-pointer load into ES. */
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
6316
6317
/**
 * @opcode 0xc5
 *
 * Dual-purpose opcode: LDS Gv,Mp or the 2-byte VEX prefix.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* R and vvvv are stored inverted in the payload byte. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

            /* 2-byte VEX implies the 0x0f opcode map. */
#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Not a VEX prefix: decode as the legacy far-pointer load into DS. */
    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
6361
6362
/**
 * @opcode 0xc6
 *
 * Group 11 with a byte immediate; only /0 (mov Eb,Ib) is defined, the other
 * reg-field encodings raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* Effective address first; the immediate byte follows the disp bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6397
6398
/**
 * @opcode 0xc7
 *
 * Group 11 with a word/dword immediate; only /0 (mov Ev,Iz) is defined, the
 * other reg-field encodings raise \#UD.  The 64-bit form sign-extends a
 * 32-bit immediate.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* Imm32 sign-extended to 64 bits, per the SDM. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access.  Note: the cbImm argument to CALC_RM_EFF_ADDR tells
           the addressing code how many immediate bytes follow the ModR/M
           encoding (2 or 4), so RIP-relative addressing comes out right. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6486
6487
6488
6489
/**
 * @opcode 0xc8
 *
 * ENTER Iw,Ib - create a stack frame of cbFrame bytes with the given nesting
 * level.  Introduced with the 80186.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
6503
6504
/**
 * @opcode 0xc9
 *
 * LEAVE - tear down the stack frame set up by ENTER.  Introduced with the
 * 80186; defaults to 64-bit operand size in long mode.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
6516
6517
/**
 * @opcode 0xca
 *
 * Far return, releasing Iw additional bytes of stack after popping the
 * return CS:IP/EIP/RIP.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6529
6530
/**
 * @opcode 0xcb
 *
 * Plain far return (same CIMPL as 0xca with a zero immediate).
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
6541
6542
6543/**
6544 * @opcode 0xcc
6545 */
6546FNIEMOP_DEF(iemOp_int3)
6547{
6548 IEMOP_MNEMONIC(int3, "int3");
6549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6550 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
6551}
6552
6553
/**
 * @opcode 0xcd
 *
 * INT Ib - software interrupt with an arbitrary vector.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, IEMINT_INTN);
}
6564
6565
/**
 * @opcode 0xce
 *
 * INTO - raise \#OF if the overflow flag is set; invalid in 64-bit mode.
 * The conditional check on EFLAGS.OF lives in iemCImpl_int, so this goes
 * through the CIMPL-call micro-op path rather than DEFER_TO_CIMPL.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(IEMINT,    enmInt,     /*=*/ IEMINT_INTO, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, enmInt);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6581
6582
/**
 * @opcode 0xcf
 *
 * IRET - interrupt return; all the mode-dependent heavy lifting is in the
 * C implementation.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
6592
6593
/**
 * @opcode 0xd0
 *
 * Group 2 byte shifts/rotates by a constant count of 1; the ModR/M reg
 * field selects the operation (/6 is undefined and raises \#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are architecturally undefined for some of these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the destination read/write and commit after the call. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,                 0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6651
6652
6653
/**
 * @opcode 0xd1
 *
 * Group 2 word/dword/qword shifts/rotates by a constant count of 1; the
 * ModR/M reg field selects the operation (/6 raises \#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are architecturally undefined for some of these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the destination read/write and commit after the call. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6787
6788
/**
 * @opcode 0xd2
 *
 * Group 2 byte shifts/rotates by the count in CL; the ModR/M reg field
 * selects the operation (/6 raises \#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are architecturally undefined for some of these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* Shift count comes from CL. */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the destination read/write and commit after the call. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6848
6849
/**
 * @opcode 0xd3
 *
 * Group 2 word/dword/qword shifts/rotates by the count in CL; the ModR/M
 * reg field selects the operation (/6 raises \#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are architecturally undefined for some of these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                /* Shift count comes from CL. */
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the destination read/write and commit after the call. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6989
/**
 * @opcode 0xd4
 *
 * AAM Ib - ASCII adjust AX after multiply; invalid in 64-bit mode.  A zero
 * immediate raises \#DE at decode time, before deferring to the CIMPL.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
7003
7004
/**
 * @opcode 0xd5
 *
 * AAD Ib - ASCII adjust AX before division; invalid in 64-bit mode.
 * Unlike AAM, a zero immediate is legal here.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
7016
7017
/**
 * @opcode 0xd6
 *
 * SALC - undocumented instruction: set AL to 0xff if CF is set, else to
 * 0x00.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7037
7038
/**
 * @opcode 0xd7
 *
 * XLAT - table lookup: AL = [seg:(e/r)BX + zero-extended AL].  One variant
 * per effective address size; the 16/32-bit forms use wrap-aware fetches.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7087
7088
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM / pending FPU exceptions first; if either operand register is
 * empty, records a stack underflow instead of calling the assembly worker.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,             2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7120
7121
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (the FSW is updated; no data register is written).
 *
 * On an empty operand register a stack underflow is recorded without
 * selecting a destination register (UINT8_MAX).
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,             2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7153
7154
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping the register stack when done.
 *
 * Same as iemOpHlpFpuNoStore_st0_stN except the FSW update (or underflow)
 * is followed by a stack pop.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,             2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7186
7187
/*
 * Register forms of the 0xd8 escape (mod=11): all operate on ST0 and ST(i),
 * dispatching through the common st0/stN workers with the matching assembly
 * implementation.
 */

/** Opcode 0xd8 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    /* Compare only updates FSW, no result stored. */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    /* Same comparison helper as FCOM, but the worker pops the stack. */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
7250
7251
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * The 32-bit real operand is fetched from memory first, then the assembly
 * helper combines it with ST0 and the result (plus FSW) replaces ST0.
 *
 * @param   bRm         Mod R/M byte (memory operand encoding).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    /* Effective address must be calculated before the decoding-done check. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand before taking the FPU state for use. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7288
7289
/** Opcode 0xd8 !11/0.
 * fadd with a 32-bit real memory source; result replaces ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1.
 * fmul with a 32-bit real memory source; result replaces ST0. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
7304
7305
/** Opcode 0xd8 !11/2.
 * Compares ST0 against a 32-bit real from memory; only FSW is updated
 * (no value stored), so this is open-coded rather than going through
 * iemOpHlpFpu_st0_m32r. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Memory-operand variant also records FPUDP/FPUDS. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7338
7339
/** Opcode 0xd8 !11/3.
 * Same as iemOp_fcom_m32r but pops the stack after updating FSW. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Update FSW (incl. FPUDP/FPUDS) and pop ST0. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7372
7373
/** Opcode 0xd8 !11/4.
 * fsub with a 32-bit real memory source; result replaces ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5.
 * fsubr (reversed operands) with a 32-bit real memory source. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6.
 * fdiv with a 32-bit real memory source; result replaces ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7.
 * fdivr (reversed operands) with a 32-bit real memory source. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7404
7405
/**
 * @opcode 0xd8
 *
 * First x87 escape byte: dispatches on the ModR/M mode (register vs memory)
 * and the reg field.  Register forms operate on ST0/STn, memory forms on
 * ST0 and an m32 real operand.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of 0xd8 + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7445
7446
/** Opcode 0xd9 /0 mem32real
 * Loads a 32-bit real from memory, converts it to 80-bit and pushes it
 * onto the FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST7 (the register below TOP) to be free, else overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7479
7480
/** Opcode 0xd9 !11/2 mem32real
 * Stores ST0 to memory as a 32-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; commit is conditional on FSW below. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow: if invalid-op exceptions are masked, store a QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7515
7516
/** Opcode 0xd9 !11/3
 * Stores ST0 to memory as a 32-bit real, then pops the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow: if invalid-op exceptions are masked, store a QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7551
7552
/** Opcode 0xd9 !11/4
 * Loads the FPU environment (14 or 28 bytes depending on operand size)
 * from memory; the heavy lifting is done in iemCImpl_fldenv. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* Whole FPU environment is overwritten, so actualize for change. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7570
7571
7572/** Opcode 0xd9 !11/5 */
7573FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7574{
7575 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7576 IEM_MC_BEGIN(1, 1);
7577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7578 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7581 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7582 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7583 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7584 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7585 IEM_MC_END();
7586 return VINF_SUCCESS;
7587}
7588
7589
/** Opcode 0xd9 !11/6
 * Stores the FPU environment (14/28 bytes) to memory without checking for
 * pending exceptions (the "no-wait" form).
 * NOTE(review): the mnemonic stat name says "fstenv" while the function
 * implements FNSTENV - presumably intentional for stats grouping; verify. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* Only reading the FPU state here; the store goes to guest memory. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7607
7608
/** Opcode 0xd9 !11/7
 * Stores the FPU control word to a 16-bit memory operand (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,  u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7626
7627
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: does nothing except updating the FPU opcode/instruction pointer. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7645
7646
/** Opcode 0xd9 11/0 stN
 * Pushes a copy of ST(i) onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Source register must be non-empty; FSW from a plain copy is 0. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7674
7675
/** Opcode 0xd9 11/3 stN
 * Exchanges ST0 and ST(i); the underflow case is complex enough that it is
 * deferred to a C implementation (iemCImpl_fxch_underflow). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        /* Swap: ST(i)'s value goes to ST0 (with C1 cleared-then-set per
           result), ST0's value goes to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7706
7707
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * Stores ST0 into ST(i) and pops.  The iDstReg==0 case (fstp st0,st0)
 * degenerates into a pure pop and is special-cased. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        /* Just pop; no value needs to be moved. */
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7754
7755
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Used by FCHS, FABS, F2XM1, FSQRT, FRNDINT, FSIN, FCOS and similar.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7785
7786
/** Opcode 0xd9 0xe0.
 * Changes the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1.
 * Clears the sign of ST0 (absolute value). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7801
7802
/** Opcode 0xd9 0xe4.
 * Compares ST0 against +0.0, setting the condition codes in FSW. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7828
7829
/** Opcode 0xd9 0xe5.
 * Classifies the value in ST0 into the FSW condition codes.  Unlike most
 * ST0 operations, FXAM works on empty registers too (it reports "empty"),
 * so there is no not-empty check here. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7852
7853
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * Used by FLD1, FLDL2T, FLDL2E, FLDPI, FLDLG2, FLDLN2 and FLDZ.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly),
 *                      which produces the constant and resulting FSW.
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target (ST7 relative to TOP) must be empty, else overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7881
7882
/*
 * Constant loads: each pushes one well-known constant via the common
 * iemOpHlpFpuPushConstant worker.
 */

/** Opcode 0xd9 0xe8. Pushes +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9. Pushes log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea. Pushes log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb. Pushes pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec. Pushes log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed. Pushes loge(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee. Pushes +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
7935
7936
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs).  In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7950
7951
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Used by FYL2X, FPATAN, FYL2XP1 (bRm passed as the STn index, typically 1).
 *
 * @param   bRm         Mod R/M byte (or a literal STn index from the caller).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Note the operand order: STn is the first (destination) operand. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7984
7985
/** Opcode 0xd9 0xf1.
 * Computes st1 * log2(st0), stores in st1 and pops. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7992
7993
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * Used by FPTAN, FXTRACT and FSINCOS.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO,           FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO,  pFpuResTwo, FpuResTwo,  0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value,              1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8023
8024
/** Opcode 0xd9 0xf2.
 * Computes tan(st0); result replaces st0 and 1.0 is pushed. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3.
 * Computes arctan(st1/st0), stores in st1 and pops. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4.
 * Splits st0 into exponent (replaces st0) and pushed significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5.
 * IEEE partial remainder of st0/st1; result replaces st0. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
8055
8056
/** Opcode 0xd9 0xf6.
 * Decrements the FPU stack TOP pointer (rotates the stack). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8079
8080
/** Opcode 0xd9 0xf7.
 * Increments the FPU stack TOP pointer (rotates the stack). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8103
8104
/** Opcode 0xd9 0xf8.
 * Partial remainder (truncating) of st0/st1; result replaces st0. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9.
 * Computes st1 * log2(st0 + 1), stores in st1 and pops. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa.
 * Square root of st0; result replaces st0. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb.
 * sin(st0) replaces st0 and cos(st0) is pushed. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc.
 * Rounds st0 to integer per the current rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd.
 * Scales st0 by 2^trunc(st1); result replaces st0. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe.
 * sin(st0); result replaces st0. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff.
 * cos(st0); result replaces st0. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
8167
8168
/** Used by iemOp_EscF1.
 * Dispatch table for the 0xd9 register forms 0xe0..0xff; indexed by
 * (bRm - 0xe0), invalid encodings map to iemOp_Invalid. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
8205
8206
8207/**
8208 * @opcode 0xd9
8209 */
8210FNIEMOP_DEF(iemOp_EscF1)
8211{
8212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8213 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
8214
8215 if (IEM_IS_MODRM_REG_MODE(bRm))
8216 {
8217 switch (IEM_GET_MODRM_REG_8(bRm))
8218 {
8219 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
8220 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
8221 case 2:
8222 if (bRm == 0xd0)
8223 return FNIEMOP_CALL(iemOp_fnop);
8224 return IEMOP_RAISE_INVALID_OPCODE();
8225 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
8226 case 4:
8227 case 5:
8228 case 6:
8229 case 7:
8230 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
8231 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
8232 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8233 }
8234 }
8235 else
8236 {
8237 switch (IEM_GET_MODRM_REG_8(bRm))
8238 {
8239 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
8240 case 1: return IEMOP_RAISE_INVALID_OPCODE();
8241 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
8242 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
8243 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
8244 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
8245 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
8246 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
8247 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8248 }
8249 }
8250}
8251
8252
8253/** Opcode 0xda 11/0. FCMOVB - copy ST(i) to ST0 if CF is set (below). */
8254FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
8255{
8256    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
8257    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8258
8259    IEM_MC_BEGIN(0, 1);
8260    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8261
8262    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8263    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8264
8265    IEM_MC_PREPARE_FPU_USAGE();
8266    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) /* both ST(i) and ST0 occupied? */
8267        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) /* condition: CF=1 */
8268            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8269        IEM_MC_ENDIF();
8270        IEM_MC_UPDATE_FPU_OPCODE_IP(); /* FOP/FIP updated even when the move is not taken. */
8271    IEM_MC_ELSE()
8272        IEM_MC_FPU_STACK_UNDERFLOW(0); /* empty register -> stack underflow targeting ST0. */
8273    IEM_MC_ENDIF();
8274    IEM_MC_ADVANCE_RIP();
8275
8276    IEM_MC_END();
8277    return VINF_SUCCESS;
8278}
8279
8280
8281/** Opcode 0xda 11/1. FCMOVE - copy ST(i) to ST0 if ZF is set (equal). */
8282FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
8283{
8284    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
8285    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8286
8287    IEM_MC_BEGIN(0, 1);
8288    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8289
8290    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8291    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8292
8293    IEM_MC_PREPARE_FPU_USAGE();
8294    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
8295        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) /* condition: ZF=1 */
8296            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8297        IEM_MC_ENDIF();
8298        IEM_MC_UPDATE_FPU_OPCODE_IP();
8299    IEM_MC_ELSE()
8300        IEM_MC_FPU_STACK_UNDERFLOW(0);
8301    IEM_MC_ENDIF();
8302    IEM_MC_ADVANCE_RIP();
8303
8304    IEM_MC_END();
8305    return VINF_SUCCESS;
8306}
8307
8308
8309/** Opcode 0xda 11/2. FCMOVBE - copy ST(i) to ST0 if CF or ZF is set (below or equal). */
8310FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
8311{
8312    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
8313    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8314
8315    IEM_MC_BEGIN(0, 1);
8316    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8317
8318    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8319    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8320
8321    IEM_MC_PREPARE_FPU_USAGE();
8322    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
8323        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) /* condition: CF=1 or ZF=1 */
8324            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8325        IEM_MC_ENDIF();
8326        IEM_MC_UPDATE_FPU_OPCODE_IP();
8327    IEM_MC_ELSE()
8328        IEM_MC_FPU_STACK_UNDERFLOW(0);
8329    IEM_MC_ENDIF();
8330    IEM_MC_ADVANCE_RIP();
8331
8332    IEM_MC_END();
8333    return VINF_SUCCESS;
8334}
8335
8336
8337/** Opcode 0xda 11/3. FCMOVU - copy ST(i) to ST0 if PF is set (unordered). */
8338FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
8339{
8340    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
8341    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8342
8343    IEM_MC_BEGIN(0, 1);
8344    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8345
8346    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8347    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8348
8349    IEM_MC_PREPARE_FPU_USAGE();
8350    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
8351        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) /* condition: PF=1 */
8352            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8353        IEM_MC_ENDIF();
8354        IEM_MC_UPDATE_FPU_OPCODE_IP();
8355    IEM_MC_ELSE()
8356        IEM_MC_FPU_STACK_UNDERFLOW(0);
8357    IEM_MC_ENDIF();
8358    IEM_MC_ADVANCE_RIP();
8359
8360    IEM_MC_END();
8361    return VINF_SUCCESS;
8362}
8363
8364
8365/**
8366 * Common worker for FPU instructions working on ST0 and STn, only affecting
8367 * flags, and popping twice when done.
8368 *
8369 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8370 */
8371FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8372{
8373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8374
8375 IEM_MC_BEGIN(3, 1);
8376 IEM_MC_LOCAL(uint16_t, u16Fsw);
8377 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8378 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8379 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8380
8381 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8382 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8383
8384 IEM_MC_PREPARE_FPU_USAGE();
8385 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
8386 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8387 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
8388 IEM_MC_ELSE()
8389 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
8390 IEM_MC_ENDIF();
8391 IEM_MC_ADVANCE_RIP();
8392
8393 IEM_MC_END();
8394 return VINF_SUCCESS;
8395}
8396
8397
8398/** Opcode 0xda 0xe9. */
8399FNIEMOP_DEF(iemOp_fucompp)
8400{
8401 IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
8402 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
8403}
8404
8405
8406/**
8407 * Common worker for FPU instructions working on ST0 and an m32i, and storing
8408 * the result in ST0.
8409 *
8410 * @param bRm Mod R/M byte.
8411 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8412 */
8413FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
8414{
8415 IEM_MC_BEGIN(3, 3);
8416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8417 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8418 IEM_MC_LOCAL(int32_t, i32Val2);
8419 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8420 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8421 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8422
8423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8425
8426 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8427 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8428 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8429
8430 IEM_MC_PREPARE_FPU_USAGE();
8431 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
8432 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
8433 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8434 IEM_MC_ELSE()
8435 IEM_MC_FPU_STACK_UNDERFLOW(0);
8436 IEM_MC_ENDIF();
8437 IEM_MC_ADVANCE_RIP();
8438
8439 IEM_MC_END();
8440 return VINF_SUCCESS;
8441}
8442
8443
8444/** Opcode 0xda !11/0. FIADD m32i - add a 32-bit signed integer memory operand to ST0. */
8445FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
8446{
8447    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
8448    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
8449}
8450
8451
8452/** Opcode 0xda !11/1. FIMUL m32i - multiply ST0 by a 32-bit signed integer memory operand. */
8453FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
8454{
8455    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
8456    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
8457}
8458
8459
8460/** Opcode 0xda !11/2. FICOM m32i - compare ST0 with a 32-bit signed integer memory operand; only FSW is updated. */
8461FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
8462{
8463    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
8464
8465    IEM_MC_BEGIN(3, 3);
8466    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8467    IEM_MC_LOCAL(uint16_t, u16Fsw);
8468    IEM_MC_LOCAL(int32_t, i32Val2);
8469    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8470    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8471    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8472
8473    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8474    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8475
8476    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8477    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8478    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8479
8480    IEM_MC_PREPARE_FPU_USAGE();
8481    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) /* ST0 occupied? */
8482        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
8483        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8484    IEM_MC_ELSE()
8485        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* UINT8_MAX: no destination register. */
8486    IEM_MC_ENDIF();
8487    IEM_MC_ADVANCE_RIP();
8488
8489    IEM_MC_END();
8490    return VINF_SUCCESS;
8491}
8492
8493
8494/** Opcode 0xda !11/3. FICOMP m32i - like FICOM above, but pops ST0 afterwards (note the *_THEN_POP FSW/underflow variants). */
8495FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
8496{
8497    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
8498
8499    IEM_MC_BEGIN(3, 3);
8500    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8501    IEM_MC_LOCAL(uint16_t, u16Fsw);
8502    IEM_MC_LOCAL(int32_t, i32Val2);
8503    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8504    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8505    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8506
8507    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8508    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8509
8510    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8511    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8512    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8513
8514    IEM_MC_PREPARE_FPU_USAGE();
8515    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
8516        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
8517        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8518    IEM_MC_ELSE()
8519        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8520    IEM_MC_ENDIF();
8521    IEM_MC_ADVANCE_RIP();
8522
8523    IEM_MC_END();
8524    return VINF_SUCCESS;
8525}
8526
8527
8528/** Opcode 0xda !11/4. FISUB m32i - subtract a 32-bit signed integer memory operand from ST0. */
8529FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
8530{
8531    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
8532    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
8533}
8534
8535
8536/** Opcode 0xda !11/5. FISUBR m32i - reverse subtract: ST0 := m32i - ST0. */
8537FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
8538{
8539    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
8540    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
8541}
8542
8543
8544/** Opcode 0xda !11/6. FIDIV m32i - divide ST0 by a 32-bit signed integer memory operand. */
8545FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
8546{
8547    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
8548    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
8549}
8550
8551
8552/** Opcode 0xda !11/7. FIDIVR m32i - reverse divide: ST0 := m32i / ST0. */
8553FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
8554{
8555    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
8556    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
8557}
8558
8559
8560/**
8561 * @opcode 0xda
8562 */
8563FNIEMOP_DEF(iemOp_EscF2)
8564{
8565 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8566 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
8567 if (IEM_IS_MODRM_REG_MODE(bRm))
8568 {
8569 switch (IEM_GET_MODRM_REG_8(bRm))
8570 {
8571 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
8572 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
8573 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
8574 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
8575 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8576 case 5:
8577 if (bRm == 0xe9)
8578 return FNIEMOP_CALL(iemOp_fucompp);
8579 return IEMOP_RAISE_INVALID_OPCODE();
8580 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8581 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8582 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8583 }
8584 }
8585 else
8586 {
8587 switch (IEM_GET_MODRM_REG_8(bRm))
8588 {
8589 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
8590 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
8591 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
8592 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
8593 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
8594 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
8595 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
8596 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
8597 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8598 }
8599 }
8600}
8601
8602
8603/** Opcode 0xdb !11/0. FILD m32i - convert a 32-bit signed integer memory operand to R80 and push it onto the FPU stack. */
8604FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
8605{
8606    IEMOP_MNEMONIC(fild_m32i, "fild m32i");
8607
8608    IEM_MC_BEGIN(2, 3);
8609    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8610    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8611    IEM_MC_LOCAL(int32_t, i32Val);
8612    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8613    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
8614
8615    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8616    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8617
8618    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8619    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8620    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8621
8622    IEM_MC_PREPARE_FPU_USAGE();
8623    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* ST7 free, i.e. room for a push? */
8624        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
8625        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8626    IEM_MC_ELSE()
8627        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* push onto a full stack -> overflow */
8628    IEM_MC_ENDIF();
8629    IEM_MC_ADVANCE_RIP();
8630
8631    IEM_MC_END();
8632    return VINF_SUCCESS;
8633}
8634
8635
8636/** Opcode 0xdb !11/1. FISTTP m32i - store ST0 to memory as int32 with truncation, then pop (SSE3 addition, see SDM). */
8637FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
8638{
8639    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
8640    IEM_MC_BEGIN(3, 2);
8641    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8642    IEM_MC_LOCAL(uint16_t, u16Fsw);
8643    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8644    IEM_MC_ARG(int32_t *, pi32Dst, 1);
8645    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
8646
8647    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8648    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8649    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8650    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8651
8652    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/); /* map the destination before touching the FPU state. */
8653    IEM_MC_PREPARE_FPU_USAGE();
8654    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
8655        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
8656        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw); /* commit is FSW-dependent (masked-exception handling). */
8657        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8658    IEM_MC_ELSE()
8659        IEM_MC_IF_FCW_IM() /* invalid-operation masked: store the integer indefinite instead of faulting. */
8660            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
8661            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
8662        IEM_MC_ENDIF();
8663        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8664    IEM_MC_ENDIF();
8665    IEM_MC_ADVANCE_RIP();
8666
8667    IEM_MC_END();
8668    return VINF_SUCCESS;
8669}
8670
8671
8672/** Opcode 0xdb !11/2. FIST m32i - store ST0 to memory as int32 (rounded per FCW); ST0 is not popped. */
8673FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
8674{
8675    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
8676    IEM_MC_BEGIN(3, 2);
8677    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8678    IEM_MC_LOCAL(uint16_t, u16Fsw);
8679    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8680    IEM_MC_ARG(int32_t *, pi32Dst, 1);
8681    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
8682
8683    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8684    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8685    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8686    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8687
8688    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
8689    IEM_MC_PREPARE_FPU_USAGE();
8690    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
8691        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
8692        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
8693        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst); /* no pop, unlike FISTP below. */
8694    IEM_MC_ELSE()
8695        IEM_MC_IF_FCW_IM()
8696            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
8697            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
8698        IEM_MC_ENDIF();
8699        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8700    IEM_MC_ENDIF();
8701    IEM_MC_ADVANCE_RIP();
8702
8703    IEM_MC_END();
8704    return VINF_SUCCESS;
8705}
8706
8707
8708/** Opcode 0xdb !11/3. FISTP m32i - store ST0 to memory as int32 (rounded per FCW), then pop. */
8709FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
8710{
8711    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
8712    IEM_MC_BEGIN(3, 2);
8713    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8714    IEM_MC_LOCAL(uint16_t, u16Fsw);
8715    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8716    IEM_MC_ARG(int32_t *, pi32Dst, 1);
8717    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
8718
8719    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8720    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8721    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8722    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8723
8724    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
8725    IEM_MC_PREPARE_FPU_USAGE();
8726    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
8727        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
8728        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
8729        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8730    IEM_MC_ELSE()
8731        IEM_MC_IF_FCW_IM()
8732            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
8733            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
8734        IEM_MC_ENDIF();
8735        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8736    IEM_MC_ENDIF();
8737    IEM_MC_ADVANCE_RIP();
8738
8739    IEM_MC_END();
8740    return VINF_SUCCESS;
8741}
8742
8743
8744/** Opcode 0xdb !11/5. FLD m80r - push an 80-bit real memory operand onto the FPU stack. */
8745FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
8746{
8747    IEMOP_MNEMONIC(fld_m80r, "fld m80r");
8748
8749    IEM_MC_BEGIN(2, 3);
8750    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8751    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8752    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
8753    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8754    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
8755
8756    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8757    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8758
8759    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8760    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8761    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8762
8763    IEM_MC_PREPARE_FPU_USAGE();
8764    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* room on the stack for a push? */
8765        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
8766        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8767    IEM_MC_ELSE()
8768        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8769    IEM_MC_ENDIF();
8770    IEM_MC_ADVANCE_RIP();
8771
8772    IEM_MC_END();
8773    return VINF_SUCCESS;
8774}
8775
8776
8777/** Opcode 0xdb !11/7. FSTP m80r - store ST0 to an 80-bit real memory operand, then pop. */
8778FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
8779{
8780    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
8781    IEM_MC_BEGIN(3, 2);
8782    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8783    IEM_MC_LOCAL(uint16_t, u16Fsw);
8784    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8785    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
8786    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
8787
8788    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8789    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8790    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8791    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8792
8793    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/); /* 10-byte store, explicit alignment mask. */
8794    IEM_MC_PREPARE_FPU_USAGE();
8795    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
8796        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
8797        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
8798        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8799    IEM_MC_ELSE()
8800        IEM_MC_IF_FCW_IM() /* invalid-operation masked: store the real indefinite (negative QNaN). */
8801            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
8802            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
8803        IEM_MC_ENDIF();
8804        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8805    IEM_MC_ENDIF();
8806    IEM_MC_ADVANCE_RIP();
8807
8808    IEM_MC_END();
8809    return VINF_SUCCESS;
8810}
8811
8812
8813/** Opcode 0xdb 11/0. FCMOVNB - copy ST(i) to ST0 if CF is clear (not below). */
8814FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
8815{
8816    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
8817    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8818
8819    IEM_MC_BEGIN(0, 1);
8820    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8821
8822    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8823    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8824
8825    IEM_MC_PREPARE_FPU_USAGE();
8826    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) /* both ST(i) and ST0 occupied? */
8827        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) /* condition: CF=0 */
8828            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8829        IEM_MC_ENDIF();
8830        IEM_MC_UPDATE_FPU_OPCODE_IP(); /* FOP/FIP updated even when the move is not taken. */
8831    IEM_MC_ELSE()
8832        IEM_MC_FPU_STACK_UNDERFLOW(0); /* empty register -> stack underflow targeting ST0. */
8833    IEM_MC_ENDIF();
8834    IEM_MC_ADVANCE_RIP();
8835
8836    IEM_MC_END();
8837    return VINF_SUCCESS;
8838}
8839
8840
8841/** Opcode 0xdb 11/1. FCMOVNE - copy ST(i) to ST0 if ZF is clear (not equal). */
8842FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
8843{
8844    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
8845    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8846
8847    IEM_MC_BEGIN(0, 1);
8848    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8849
8850    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8851    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8852
8853    IEM_MC_PREPARE_FPU_USAGE();
8854    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
8855        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) /* condition: ZF=0 */
8856            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8857        IEM_MC_ENDIF();
8858        IEM_MC_UPDATE_FPU_OPCODE_IP();
8859    IEM_MC_ELSE()
8860        IEM_MC_FPU_STACK_UNDERFLOW(0);
8861    IEM_MC_ENDIF();
8862    IEM_MC_ADVANCE_RIP();
8863
8864    IEM_MC_END();
8865    return VINF_SUCCESS;
8866}
8867
8868
8869/** Opcode 0xdb 11/2. FCMOVNBE - copy ST(i) to ST0 if both CF and ZF are clear (not below or equal). */
8870FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
8871{
8872    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
8873    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8874
8875    IEM_MC_BEGIN(0, 1);
8876    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8877
8878    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8879    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8880
8881    IEM_MC_PREPARE_FPU_USAGE();
8882    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
8883        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) /* condition: CF=0 and ZF=0 */
8884            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8885        IEM_MC_ENDIF();
8886        IEM_MC_UPDATE_FPU_OPCODE_IP();
8887    IEM_MC_ELSE()
8888        IEM_MC_FPU_STACK_UNDERFLOW(0);
8889    IEM_MC_ENDIF();
8890    IEM_MC_ADVANCE_RIP();
8891
8892    IEM_MC_END();
8893    return VINF_SUCCESS;
8894}
8895
8896
8897/** Opcode 0xdb 11/3. FCMOVNU - copy ST(i) to ST0 if PF is clear (not unordered).
8898 *  (Function/mnemonic identifier spells "nnu"; the instruction is FCMOVNU.) */
8898FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
8899{
8900    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
8901    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8902
8903    IEM_MC_BEGIN(0, 1);
8904    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8905
8906    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8907    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8908
8909    IEM_MC_PREPARE_FPU_USAGE();
8910    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
8911        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) /* condition: PF=0 */
8912            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8913        IEM_MC_ENDIF();
8914        IEM_MC_UPDATE_FPU_OPCODE_IP();
8915    IEM_MC_ELSE()
8916        IEM_MC_FPU_STACK_UNDERFLOW(0);
8917    IEM_MC_ENDIF();
8918    IEM_MC_ADVANCE_RIP();
8919
8920    IEM_MC_END();
8921    return VINF_SUCCESS;
8922}
8923
8924
8925/** Opcode 0xdb 0xe0. FNENI - 8087 interrupt enable; ignored (NOP) on later CPUs, only #NM is checked. */
8926FNIEMOP_DEF(iemOp_fneni)
8927{
8928    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
8929    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8930    IEM_MC_BEGIN(0,0);
8931    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8932    IEM_MC_ADVANCE_RIP();
8933    IEM_MC_END();
8934    return VINF_SUCCESS;
8935}
8936
8937
8938/** Opcode 0xdb 0xe1. FNDISI - 8087 interrupt disable; ignored (NOP) on later CPUs, only #NM is checked. */
8939FNIEMOP_DEF(iemOp_fndisi)
8940{
8941    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
8942    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8943    IEM_MC_BEGIN(0,0);
8944    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8945    IEM_MC_ADVANCE_RIP();
8946    IEM_MC_END();
8947    return VINF_SUCCESS;
8948}
8949
8950
8951/** Opcode 0xdb 0xe2. FNCLEX - clear FPU exception flags without checking for pending unmasked exceptions. */
8952FNIEMOP_DEF(iemOp_fnclex)
8953{
8954    IEMOP_MNEMONIC(fnclex, "fnclex");
8955    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8956
8957    IEM_MC_BEGIN(0,0);
8958    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8959    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* modifies FSW, so actualize for change. */
8960    IEM_MC_CLEAR_FSW_EX();
8961    IEM_MC_ADVANCE_RIP();
8962    IEM_MC_END();
8963    return VINF_SUCCESS;
8964}
8965
8966
8967/** Opcode 0xdb 0xe3. FNINIT - initialize the FPU without checking for pending unmasked exceptions (C-impl deferred). */
8968FNIEMOP_DEF(iemOp_fninit)
8969{
8970    IEMOP_MNEMONIC(fninit, "fninit");
8971    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8972    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
8973}
8974
8975
8976/** Opcode 0xdb 0xe4. FNSETPM - 80287 only; ignored (NOP) here, only #NM is checked. */
8977FNIEMOP_DEF(iemOp_fnsetpm)
8978{
8979    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
8980    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8981    IEM_MC_BEGIN(0,0);
8982    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8983    IEM_MC_ADVANCE_RIP();
8984    IEM_MC_END();
8985    return VINF_SUCCESS;
8986}
8987
8988
8989/** Opcode 0xdb 0xe5. FRSTPM - 80287XL only; raises #UD here (the NOP variant is compiled out). */
8990FNIEMOP_DEF(iemOp_frstpm)
8991{
8992    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
8993#if 0 /* #UDs on newer CPUs */
8994    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8995    IEM_MC_BEGIN(0,0);
8996    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8997    IEM_MC_ADVANCE_RIP();
8998    IEM_MC_END();
8999    return VINF_SUCCESS;
9000#else
9001    return IEMOP_RAISE_INVALID_OPCODE();
9002#endif
9003}
9004
9005
9006/** Opcode 0xdb 11/5. FUCOMI - unordered compare ST0 with ST(i), setting EFLAGS; no pop (fPop=false). */
9007FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
9008{
9009    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
9010    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
9011}
9012
9013
9014/** Opcode 0xdb 11/6. FCOMI - ordered compare ST0 with ST(i), setting EFLAGS; no pop (fPop=false). */
9015FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
9016{
9017    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
9018    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
9019}
9020
9021
9022/**
9023 * @opcode 0xdb
9024 */
9025FNIEMOP_DEF(iemOp_EscF3)
9026{
9027 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9028 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
9029 if (IEM_IS_MODRM_REG_MODE(bRm))
9030 {
9031 switch (IEM_GET_MODRM_REG_8(bRm))
9032 {
9033 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
9034 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
9035 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
9036 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
9037 case 4:
9038 switch (bRm)
9039 {
9040 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
9041 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
9042 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
9043 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
9044 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
9045 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
9046 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
9047 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
9048 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9049 }
9050 break;
9051 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
9052 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
9053 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9054 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9055 }
9056 }
9057 else
9058 {
9059 switch (IEM_GET_MODRM_REG_8(bRm))
9060 {
9061 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
9062 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
9063 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
9064 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
9065 case 4: return IEMOP_RAISE_INVALID_OPCODE();
9066 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
9067 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9068 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
9069 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9070 }
9071 }
9072}
9073
9074
9075/**
9076 * Common worker for FPU instructions working on STn and ST0, and storing the
9077 * result in STn unless IE, DE or ZE was raised.
9078 *
9079 * @param bRm Mod R/M byte.
9080 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9081 */
9082FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9083{
9084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9085
9086 IEM_MC_BEGIN(3, 1);
9087 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9088 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9089 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9090 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9091
9092 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9093 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9094
9095 IEM_MC_PREPARE_FPU_USAGE();
9096 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0)
9097 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9098 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
9099 IEM_MC_ELSE()
9100 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
9101 IEM_MC_ENDIF();
9102 IEM_MC_ADVANCE_RIP();
9103
9104 IEM_MC_END();
9105 return VINF_SUCCESS;
9106}
9107
9108
9109/** Opcode 0xdc 11/0. FADD ST(i),ST0 - add ST0 to ST(i), storing in ST(i). */
9110FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
9111{
9112    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
9113    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
9114}
9115
9116
9117/** Opcode 0xdc 11/1. FMUL ST(i),ST0 - multiply ST(i) by ST0, storing in ST(i). */
9118FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
9119{
9120    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
9121    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
9122}
9123
9124
9125/** Opcode 0xdc 11/4. FSUBR ST(i),ST0 - reverse subtract, storing in ST(i). */
9126FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
9127{
9128    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
9129    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
9130}
9131
9132
9133/** Opcode 0xdc 11/5. FSUB ST(i),ST0 - subtract, storing in ST(i). */
9134FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
9135{
9136    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
9137    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
9138}
9139
9140
9141/** Opcode 0xdc 11/6. FDIVR ST(i),ST0 - reverse divide, storing in ST(i). */
9142FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
9143{
9144    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
9145    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
9146}
9147
9148
9149/** Opcode 0xdc 11/7. FDIV ST(i),ST0 - divide, storing in ST(i). */
9150FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
9151{
9152    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
9153    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
9154}
9155
9156
9157/**
9158 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
9159 * memory operand, and storing the result in ST0.
9160 *
9161 * @param bRm Mod R/M byte.
9162 * @param pfnImpl Pointer to the instruction implementation (assembly).
9163 */
9164FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
9165{
9166 IEM_MC_BEGIN(3, 3);
9167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9168 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9169 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
9170 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9171 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
9172 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
9173
9174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9176 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9177 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9178
9179 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9180 IEM_MC_PREPARE_FPU_USAGE();
9181 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
9182 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
9183 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9184 IEM_MC_ELSE()
9185 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9186 IEM_MC_ENDIF();
9187 IEM_MC_ADVANCE_RIP();
9188
9189 IEM_MC_END();
9190 return VINF_SUCCESS;
9191}
9192
9193
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    /* ST0 = ST0 + m64r */
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
9200
9201
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    /* ST0 = ST0 * m64r */
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
9208
9209
/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    /* Compare ST0 with an m64 real; only FSW (condition codes) is updated,
       no stack register is written. */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: no destination register to mark for the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9242
9243
/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    /* Same as FCOM m64r (shares iemAImpl_fcom_r80_by_r64), but pops the
       register stack afterwards (the _THEN_POP FSW/underflow variants). */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9276
9277
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    /* ST0 = ST0 - m64r */
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
9284
9285
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    /* ST0 = m64r - ST0 (reverse subtract) */
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
9292
9293
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    /* ST0 = ST0 / m64r */
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
9300
9301
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    /* ST0 = m64r / ST0 (reverse divide) */
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
9308
9309
9310/**
9311 * @opcode 0xdc
9312 */
9313FNIEMOP_DEF(iemOp_EscF4)
9314{
9315 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9316 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
9317 if (IEM_IS_MODRM_REG_MODE(bRm))
9318 {
9319 switch (IEM_GET_MODRM_REG_8(bRm))
9320 {
9321 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
9322 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
9323 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
9324 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
9325 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
9326 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
9327 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
9328 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
9329 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9330 }
9331 }
9332 else
9333 {
9334 switch (IEM_GET_MODRM_REG_8(bRm))
9335 {
9336 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
9337 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
9338 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
9339 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
9340 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
9341 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
9342 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
9343 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
9344 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9345 }
9346 }
9347}
9348
9349
/** Opcode 0xdd !11/0.
 * Loads an m64 real onto the FPU stack (push).
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is the current ST(7); it must be empty or we overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9381
9382
/** Opcode 0xdd !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    /* Store ST0 to m64i with truncation (fistt worker), then pop. */
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        /* The commit is skipped if the conversion raised an unmasked exception. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: store the integer indefinite if #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9417
9418
/** Opcode 0xdd !11/2. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    /* Store ST0 to an m64 real; no pop. */
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: store the real indefinite (negative QNaN) if #IA masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9453
9454
9455
9456
9457/** Opcode 0xdd !11/0. */
9458FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
9459{
9460 IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
9461 IEM_MC_BEGIN(3, 2);
9462 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9463 IEM_MC_LOCAL(uint16_t, u16Fsw);
9464 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9465 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
9466 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9467
9468 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9470 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9471 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9472
9473 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9474 IEM_MC_PREPARE_FPU_USAGE();
9475 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9476 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
9477 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
9478 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9479 IEM_MC_ELSE()
9480 IEM_MC_IF_FCW_IM()
9481 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
9482 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
9483 IEM_MC_ENDIF();
9484 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9485 IEM_MC_ENDIF();
9486 IEM_MC_ADVANCE_RIP();
9487
9488 IEM_MC_END();
9489 return VINF_SUCCESS;
9490}
9491
9492
/** Opcode 0xdd !11/4. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    /* Restores the full FPU state from memory; deferred to a C implementation
       since the layout depends on the effective operand size (94/108 bytes). */
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,       iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,       GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9510
9511
/** Opcode 0xdd !11/6. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    /* Saves the full FPU state to memory; deferred to a C implementation. */
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,       iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,       GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
9530
/** Opcode 0xdd !11/7. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    /* Stores the FPU status word to an m16; read-only access, no FPU
       exception check (this is the no-wait form). */
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
9555
9556
/** Opcode 0xdd 11/0. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    /* Marks ST(i) as empty in the tag word; TOP is not changed. */
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9578
9579
/** Opcode 0xdd 11/2. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    /* Copies ST0 into ST(i) without changing the status word (FSW arg 0). */
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9604
9605
/** Opcode 0xdd 11/4. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    /* Unordered compare ST0 with ST(i); updates FSW only, no store. */
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
9612
9613
/** Opcode 0xdd 11/5. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    /* Unordered compare ST0 with ST(i), then pop. */
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9620
9621
9622/**
9623 * @opcode 0xdd
9624 */
9625FNIEMOP_DEF(iemOp_EscF5)
9626{
9627 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9628 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
9629 if (IEM_IS_MODRM_REG_MODE(bRm))
9630 {
9631 switch (IEM_GET_MODRM_REG_8(bRm))
9632 {
9633 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
9634 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
9635 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
9636 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
9637 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
9638 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
9639 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9640 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9641 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9642 }
9643 }
9644 else
9645 {
9646 switch (IEM_GET_MODRM_REG_8(bRm))
9647 {
9648 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
9649 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
9650 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
9651 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
9652 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
9653 case 5: return IEMOP_RAISE_INVALID_OPCODE();
9654 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
9655 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
9656 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9657 }
9658 }
9659}
9660
9661
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    /* ST(i) = ST(i) + ST(0), then pop. */
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
9668
9669
/** Opcode 0xde 11/1. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    /* ST(i) = ST(i) * ST(0), then pop. */
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
9676
9677
/** Opcode 0xde 0xd9. */
FNIEMOP_DEF(iemOp_fcompp)
{
    /* Compare ST0 with ST1, then pop twice; FSW only, no store. */
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
9684
9685
/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    /* ST(i) = ST(0) - ST(i) (reverse subtract), then pop. */
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
9692
9693
/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    /* ST(i) = ST(i) - ST(0), then pop. */
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
9700
9701
/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    /* ST(i) = ST(0) / ST(i) (reverse divide), then pop. */
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
9708
9709
/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    /* ST(i) = ST(i) / ST(0), then pop. */
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9716
9717
9718/**
9719 * Common worker for FPU instructions working on ST0 and an m16i, and storing
9720 * the result in ST0.
9721 *
9722 * @param bRm Mod R/M byte.
9723 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9724 */
9725FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
9726{
9727 IEM_MC_BEGIN(3, 3);
9728 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9729 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9730 IEM_MC_LOCAL(int16_t, i16Val2);
9731 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9732 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9733 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
9734
9735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9737
9738 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9739 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9740 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9741
9742 IEM_MC_PREPARE_FPU_USAGE();
9743 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9744 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
9745 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
9746 IEM_MC_ELSE()
9747 IEM_MC_FPU_STACK_UNDERFLOW(0);
9748 IEM_MC_ENDIF();
9749 IEM_MC_ADVANCE_RIP();
9750
9751 IEM_MC_END();
9752 return VINF_SUCCESS;
9753}
9754
9755
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    /* ST0 = ST0 + (int16)m16i */
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
9762
9763
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    /* ST0 = ST0 * (int16)m16i */
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9770
9771
/** Opcode 0xde !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    /* Compare ST0 with a 16-bit signed integer; FSW only, no store. */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9804
9805
/** Opcode 0xde !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    /* Same as FICOM m16i, but pops the register stack afterwards. */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9838
9839
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    /* ST0 = ST0 - (int16)m16i */
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
9846
9847
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    /* ST0 = (int16)m16i - ST0 (reverse subtract) */
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
9854
9855
/** Opcode 0xde !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    /* ST0 = ST0 / (int16)m16i */
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
9862
9863
/** Opcode 0xde !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    /* ST0 = (int16)m16i / ST0 (reverse divide) */
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9870
9871
9872/**
9873 * @opcode 0xde
9874 */
9875FNIEMOP_DEF(iemOp_EscF6)
9876{
9877 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9878 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
9879 if (IEM_IS_MODRM_REG_MODE(bRm))
9880 {
9881 switch (IEM_GET_MODRM_REG_8(bRm))
9882 {
9883 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
9884 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
9885 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9886 case 3: if (bRm == 0xd9)
9887 return FNIEMOP_CALL(iemOp_fcompp);
9888 return IEMOP_RAISE_INVALID_OPCODE();
9889 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
9890 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
9891 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
9892 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
9893 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9894 }
9895 }
9896 else
9897 {
9898 switch (IEM_GET_MODRM_REG_8(bRm))
9899 {
9900 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
9901 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
9902 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
9903 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
9904 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
9905 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
9906 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
9907 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
9908 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9909 }
9910 }
9911}
9912
9913
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    /* Frees ST(i) and then pops (increments TOP). */
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9935
9936
/** Opcode 0xdf 0xe0. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    /* Stores the FPU status word in AX; no-wait form, read-only FPU access. */
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9953
9954
9955/** Opcode 0xdf 11/5. */
9956FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
9957{
9958 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
9959 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
9960}
9961
9962
/** Opcode 0xdf 11/6. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    /* Ordered compare ST0 with ST(i) into EFLAGS, then pop. */
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9969
9970
/** Opcode 0xdf !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    /* Loads a 16-bit signed integer onto the FPU stack (push). */
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is the current ST(7); it must be empty or we overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10002
10003
/** Opcode 0xdf !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    /* Store ST0 to m16i with truncation (fistt worker), then pop. */
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* The commit is skipped if the conversion raised an unmasked exception. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: store the integer indefinite if #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10038
10039
/** Opcode 0xdf !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    /* Store ST0 to m16i rounding per FCW.RC (fist worker); no pop. */
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10074
10075
/** Opcode 0xdf !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    /* Like FIST m16i, but pops the register stack afterwards. */
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10110
10111
/** Opcode 0xdf !11/4. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    /* Loads an 80-bit packed BCD value onto the FPU stack (push). */
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is the current ST(7); it must be empty or we overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10143
10144
/** Opcode 0xdf !11/5. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    /*
     * FILD m64int: load a signed 64-bit integer from memory, convert it to an
     * 80-bit real and push it onto the FPU stack.
     */
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push lands in ST(7) relative to the current top; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* Destination register occupied: raise/record stack overflow instead of pushing. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10176
10177
/** Opcode 0xdf !11/6. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    /*
     * FBSTP m80bcd: store ST(0) as an 80-bit packed BCD value and pop the FPU
     * stack.  The destination is mapped before the conversion so that a
     * masked-IM underflow can still write the BCD indefinite value.
     */
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* NOTE(review): cbAlign is 7 here rather than a power of two like the other
       stores; presumably deliberate for the 10-byte BCD operand — confirm against
       IEM_MC_MEM_MAP_EX's alignment semantics. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* ST(0) is valid: convert, commit the store per the resulting FSW, update FSW and pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: if IM is masked, store the BCD indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10212
10213
/** Opcode 0xdf !11/7. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    /*
     * FISTP m64int: store ST(0) as a signed 64-bit integer and pop the FPU
     * stack.  Same structure as the m16i variant, just with a 64-bit target.
     */
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* ST(0) is valid: convert, commit the store per the resulting FSW, update FSW and pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: if IM is masked, store the 64-bit integer indefinite (INT64_MIN). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10248
10249
10250/**
10251 * @opcode 0xdf
10252 */
10253FNIEMOP_DEF(iemOp_EscF7)
10254{
10255 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10256 if (IEM_IS_MODRM_REG_MODE(bRm))
10257 {
10258 switch (IEM_GET_MODRM_REG_8(bRm))
10259 {
10260 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
10261 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
10262 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10263 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10264 case 4: if (bRm == 0xe0)
10265 return FNIEMOP_CALL(iemOp_fnstsw_ax);
10266 return IEMOP_RAISE_INVALID_OPCODE();
10267 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
10268 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
10269 case 7: return IEMOP_RAISE_INVALID_OPCODE();
10270 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10271 }
10272 }
10273 else
10274 {
10275 switch (IEM_GET_MODRM_REG_8(bRm))
10276 {
10277 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
10278 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
10279 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
10280 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
10281 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
10282 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
10283 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
10284 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
10285 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10286 }
10287 }
10288}
10289
10290
10291/**
10292 * @opcode 0xe0
10293 */
10294FNIEMOP_DEF(iemOp_loopne_Jb)
10295{
10296 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
10297 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10299 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10300
10301 switch (pVCpu->iem.s.enmEffAddrMode)
10302 {
10303 case IEMMODE_16BIT:
10304 IEM_MC_BEGIN(0,0);
10305 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10306 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10307 IEM_MC_REL_JMP_S8(i8Imm);
10308 } IEM_MC_ELSE() {
10309 IEM_MC_ADVANCE_RIP();
10310 } IEM_MC_ENDIF();
10311 IEM_MC_END();
10312 return VINF_SUCCESS;
10313
10314 case IEMMODE_32BIT:
10315 IEM_MC_BEGIN(0,0);
10316 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10317 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10318 IEM_MC_REL_JMP_S8(i8Imm);
10319 } IEM_MC_ELSE() {
10320 IEM_MC_ADVANCE_RIP();
10321 } IEM_MC_ENDIF();
10322 IEM_MC_END();
10323 return VINF_SUCCESS;
10324
10325 case IEMMODE_64BIT:
10326 IEM_MC_BEGIN(0,0);
10327 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10328 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10329 IEM_MC_REL_JMP_S8(i8Imm);
10330 } IEM_MC_ELSE() {
10331 IEM_MC_ADVANCE_RIP();
10332 } IEM_MC_ENDIF();
10333 IEM_MC_END();
10334 return VINF_SUCCESS;
10335
10336 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10337 }
10338}
10339
10340
10341/**
10342 * @opcode 0xe1
10343 */
10344FNIEMOP_DEF(iemOp_loope_Jb)
10345{
10346 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
10347 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10349 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10350
10351 switch (pVCpu->iem.s.enmEffAddrMode)
10352 {
10353 case IEMMODE_16BIT:
10354 IEM_MC_BEGIN(0,0);
10355 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10356 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10357 IEM_MC_REL_JMP_S8(i8Imm);
10358 } IEM_MC_ELSE() {
10359 IEM_MC_ADVANCE_RIP();
10360 } IEM_MC_ENDIF();
10361 IEM_MC_END();
10362 return VINF_SUCCESS;
10363
10364 case IEMMODE_32BIT:
10365 IEM_MC_BEGIN(0,0);
10366 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10367 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10368 IEM_MC_REL_JMP_S8(i8Imm);
10369 } IEM_MC_ELSE() {
10370 IEM_MC_ADVANCE_RIP();
10371 } IEM_MC_ENDIF();
10372 IEM_MC_END();
10373 return VINF_SUCCESS;
10374
10375 case IEMMODE_64BIT:
10376 IEM_MC_BEGIN(0,0);
10377 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10378 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10379 IEM_MC_REL_JMP_S8(i8Imm);
10380 } IEM_MC_ELSE() {
10381 IEM_MC_ADVANCE_RIP();
10382 } IEM_MC_ENDIF();
10383 IEM_MC_END();
10384 return VINF_SUCCESS;
10385
10386 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10387 }
10388}
10389
10390
10391/**
10392 * @opcode 0xe2
10393 */
10394FNIEMOP_DEF(iemOp_loop_Jb)
10395{
10396 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
10397 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10399 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10400
10401 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
10402 * using the 32-bit operand size override. How can that be restarted? See
10403 * weird pseudo code in intel manual. */
10404
10405 /** NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
10406 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
10407 * the loop causes guest crashes, but when logging it's nice to skip a few million
10408 * lines of useless output. */
10409#if defined(LOG_ENABLED)
10410 if ((LogIs3Enabled() || LogIs4Enabled()) && (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm))
10411 switch (pVCpu->iem.s.enmEffAddrMode)
10412 {
10413 case IEMMODE_16BIT:
10414 IEM_MC_BEGIN(0,0);
10415 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
10416 IEM_MC_ADVANCE_RIP();
10417 IEM_MC_END();
10418 return VINF_SUCCESS;
10419
10420 case IEMMODE_32BIT:
10421 IEM_MC_BEGIN(0,0);
10422 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
10423 IEM_MC_ADVANCE_RIP();
10424 IEM_MC_END();
10425 return VINF_SUCCESS;
10426
10427 case IEMMODE_64BIT:
10428 IEM_MC_BEGIN(0,0);
10429 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
10430 IEM_MC_ADVANCE_RIP();
10431 IEM_MC_END();
10432 return VINF_SUCCESS;
10433
10434 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10435 }
10436#endif
10437
10438 switch (pVCpu->iem.s.enmEffAddrMode)
10439 {
10440 case IEMMODE_16BIT:
10441 IEM_MC_BEGIN(0,0);
10442
10443 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10444 IEM_MC_IF_CX_IS_NZ() {
10445 IEM_MC_REL_JMP_S8(i8Imm);
10446 } IEM_MC_ELSE() {
10447 IEM_MC_ADVANCE_RIP();
10448 } IEM_MC_ENDIF();
10449 IEM_MC_END();
10450 return VINF_SUCCESS;
10451
10452 case IEMMODE_32BIT:
10453 IEM_MC_BEGIN(0,0);
10454 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10455 IEM_MC_IF_ECX_IS_NZ() {
10456 IEM_MC_REL_JMP_S8(i8Imm);
10457 } IEM_MC_ELSE() {
10458 IEM_MC_ADVANCE_RIP();
10459 } IEM_MC_ENDIF();
10460 IEM_MC_END();
10461 return VINF_SUCCESS;
10462
10463 case IEMMODE_64BIT:
10464 IEM_MC_BEGIN(0,0);
10465 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10466 IEM_MC_IF_RCX_IS_NZ() {
10467 IEM_MC_REL_JMP_S8(i8Imm);
10468 } IEM_MC_ELSE() {
10469 IEM_MC_ADVANCE_RIP();
10470 } IEM_MC_ENDIF();
10471 IEM_MC_END();
10472 return VINF_SUCCESS;
10473
10474 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10475 }
10476}
10477
10478
10479/**
10480 * @opcode 0xe3
10481 */
10482FNIEMOP_DEF(iemOp_jecxz_Jb)
10483{
10484 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
10485 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10487 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10488
10489 switch (pVCpu->iem.s.enmEffAddrMode)
10490 {
10491 case IEMMODE_16BIT:
10492 IEM_MC_BEGIN(0,0);
10493 IEM_MC_IF_CX_IS_NZ() {
10494 IEM_MC_ADVANCE_RIP();
10495 } IEM_MC_ELSE() {
10496 IEM_MC_REL_JMP_S8(i8Imm);
10497 } IEM_MC_ENDIF();
10498 IEM_MC_END();
10499 return VINF_SUCCESS;
10500
10501 case IEMMODE_32BIT:
10502 IEM_MC_BEGIN(0,0);
10503 IEM_MC_IF_ECX_IS_NZ() {
10504 IEM_MC_ADVANCE_RIP();
10505 } IEM_MC_ELSE() {
10506 IEM_MC_REL_JMP_S8(i8Imm);
10507 } IEM_MC_ENDIF();
10508 IEM_MC_END();
10509 return VINF_SUCCESS;
10510
10511 case IEMMODE_64BIT:
10512 IEM_MC_BEGIN(0,0);
10513 IEM_MC_IF_RCX_IS_NZ() {
10514 IEM_MC_ADVANCE_RIP();
10515 } IEM_MC_ELSE() {
10516 IEM_MC_REL_JMP_S8(i8Imm);
10517 } IEM_MC_ENDIF();
10518 IEM_MC_END();
10519 return VINF_SUCCESS;
10520
10521 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10522 }
10523}
10524
10525
10526/** Opcode 0xe4 */
10527FNIEMOP_DEF(iemOp_in_AL_Ib)
10528{
10529 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
10530 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10532 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, 1);
10533}
10534
10535
10536/** Opcode 0xe5 */
10537FNIEMOP_DEF(iemOp_in_eAX_Ib)
10538{
10539 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
10540 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10542 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
10543}
10544
10545
10546/** Opcode 0xe6 */
10547FNIEMOP_DEF(iemOp_out_Ib_AL)
10548{
10549 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
10550 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10552 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, 1);
10553}
10554
10555
10556/** Opcode 0xe7 */
10557FNIEMOP_DEF(iemOp_out_Ib_eAX)
10558{
10559 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
10560 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10562 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
10563}
10564
10565
10566/**
10567 * @opcode 0xe8
10568 */
10569FNIEMOP_DEF(iemOp_call_Jv)
10570{
10571 IEMOP_MNEMONIC(call_Jv, "call Jv");
10572 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10573 switch (pVCpu->iem.s.enmEffOpSize)
10574 {
10575 case IEMMODE_16BIT:
10576 {
10577 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10578 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
10579 }
10580
10581 case IEMMODE_32BIT:
10582 {
10583 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10584 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
10585 }
10586
10587 case IEMMODE_64BIT:
10588 {
10589 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10590 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
10591 }
10592
10593 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10594 }
10595}
10596
10597
10598/**
10599 * @opcode 0xe9
10600 */
10601FNIEMOP_DEF(iemOp_jmp_Jv)
10602{
10603 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
10604 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10605 switch (pVCpu->iem.s.enmEffOpSize)
10606 {
10607 case IEMMODE_16BIT:
10608 {
10609 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
10610 IEM_MC_BEGIN(0, 0);
10611 IEM_MC_REL_JMP_S16(i16Imm);
10612 IEM_MC_END();
10613 return VINF_SUCCESS;
10614 }
10615
10616 case IEMMODE_64BIT:
10617 case IEMMODE_32BIT:
10618 {
10619 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
10620 IEM_MC_BEGIN(0, 0);
10621 IEM_MC_REL_JMP_S32(i32Imm);
10622 IEM_MC_END();
10623 return VINF_SUCCESS;
10624 }
10625
10626 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10627 }
10628}
10629
10630
10631/**
10632 * @opcode 0xea
10633 */
10634FNIEMOP_DEF(iemOp_jmp_Ap)
10635{
10636 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
10637 IEMOP_HLP_NO_64BIT();
10638
10639 /* Decode the far pointer address and pass it on to the far call C implementation. */
10640 uint32_t offSeg;
10641 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
10642 IEM_OPCODE_GET_NEXT_U32(&offSeg);
10643 else
10644 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
10645 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
10646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10647 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
10648}
10649
10650
10651/**
10652 * @opcode 0xeb
10653 */
10654FNIEMOP_DEF(iemOp_jmp_Jb)
10655{
10656 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
10657 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10659 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10660
10661 IEM_MC_BEGIN(0, 0);
10662 IEM_MC_REL_JMP_S8(i8Imm);
10663 IEM_MC_END();
10664 return VINF_SUCCESS;
10665}
10666
10667
/** Opcode 0xec - IN AL,DX: read one byte from the port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1 /* cbReg */);
}
10675
10676
/** Opcode 0xed - IN eAX,DX: read a word/dword (per operand size) from the port in DX. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10684
10685
/** Opcode 0xee - OUT DX,AL: write AL to the port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1 /* cbReg */);
}
10693
10694
/** Opcode 0xef - OUT DX,eAX: write AX/EAX (per operand size) to the port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10702
10703
10704/**
10705 * @opcode 0xf0
10706 */
10707FNIEMOP_DEF(iemOp_lock)
10708{
10709 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
10710 if (!pVCpu->iem.s.fDisregardLock)
10711 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
10712
10713 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10714 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10715}
10716
10717
10718/**
10719 * @opcode 0xf1
10720 */
10721FNIEMOP_DEF(iemOp_int1)
10722{
10723 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
10724 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
10725 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
10726 * LOADALL memo. Needs some testing. */
10727 IEMOP_HLP_MIN_386();
10728 /** @todo testcase! */
10729 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
10730}
10731
10732
10733/**
10734 * @opcode 0xf2
10735 */
10736FNIEMOP_DEF(iemOp_repne)
10737{
10738 /* This overrides any previous REPE prefix. */
10739 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
10740 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
10741 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
10742
10743 /* For the 4 entry opcode tables, REPNZ overrides any previous
10744 REPZ and operand size prefixes. */
10745 pVCpu->iem.s.idxPrefix = 3;
10746
10747 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10748 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10749}
10750
10751
10752/**
10753 * @opcode 0xf3
10754 */
10755FNIEMOP_DEF(iemOp_repe)
10756{
10757 /* This overrides any previous REPNE prefix. */
10758 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
10759 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
10760 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
10761
10762 /* For the 4 entry opcode tables, REPNZ overrides any previous
10763 REPNZ and operand size prefixes. */
10764 pVCpu->iem.s.idxPrefix = 2;
10765
10766 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10767 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10768}
10769
10770
10771/**
10772 * @opcode 0xf4
10773 */
10774FNIEMOP_DEF(iemOp_hlt)
10775{
10776 IEMOP_MNEMONIC(hlt, "hlt");
10777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10778 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
10779}
10780
10781
10782/**
10783 * @opcode 0xf5
10784 */
10785FNIEMOP_DEF(iemOp_cmc)
10786{
10787 IEMOP_MNEMONIC(cmc, "cmc");
10788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10789 IEM_MC_BEGIN(0, 0);
10790 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
10791 IEM_MC_ADVANCE_RIP();
10792 IEM_MC_END();
10793 return VINF_SUCCESS;
10794}
10795
10796
10797/**
10798 * Common implementation of 'inc/dec/not/neg Eb'.
10799 *
10800 * @param bRm The RM byte.
10801 * @param pImpl The instruction implementation.
10802 */
10803FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10804{
10805 if (IEM_IS_MODRM_REG_MODE(bRm))
10806 {
10807 /* register access */
10808 IEM_MC_BEGIN(2, 0);
10809 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10810 IEM_MC_ARG(uint32_t *, pEFlags, 1);
10811 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10812 IEM_MC_REF_EFLAGS(pEFlags);
10813 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10814 IEM_MC_ADVANCE_RIP();
10815 IEM_MC_END();
10816 }
10817 else
10818 {
10819 /* memory access. */
10820 IEM_MC_BEGIN(2, 2);
10821 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10822 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10824
10825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10826 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10827 IEM_MC_FETCH_EFLAGS(EFlags);
10828 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10829 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10830 else
10831 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
10832
10833 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10834 IEM_MC_COMMIT_EFLAGS(EFlags);
10835 IEM_MC_ADVANCE_RIP();
10836 IEM_MC_END();
10837 }
10838 return VINF_SUCCESS;
10839}
10840
10841
10842/**
10843 * Common implementation of 'inc/dec/not/neg Ev'.
10844 *
10845 * @param bRm The RM byte.
10846 * @param pImpl The instruction implementation.
10847 */
10848FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10849{
10850 /* Registers are handled by a common worker. */
10851 if (IEM_IS_MODRM_REG_MODE(bRm))
10852 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, IEM_GET_MODRM_RM(pVCpu, bRm));
10853
10854 /* Memory we do here. */
10855 switch (pVCpu->iem.s.enmEffOpSize)
10856 {
10857 case IEMMODE_16BIT:
10858 IEM_MC_BEGIN(2, 2);
10859 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10860 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10862
10863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10864 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10865 IEM_MC_FETCH_EFLAGS(EFlags);
10866 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10867 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
10868 else
10869 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
10870
10871 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10872 IEM_MC_COMMIT_EFLAGS(EFlags);
10873 IEM_MC_ADVANCE_RIP();
10874 IEM_MC_END();
10875 return VINF_SUCCESS;
10876
10877 case IEMMODE_32BIT:
10878 IEM_MC_BEGIN(2, 2);
10879 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10880 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10882
10883 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10884 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10885 IEM_MC_FETCH_EFLAGS(EFlags);
10886 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10887 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
10888 else
10889 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
10890
10891 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10892 IEM_MC_COMMIT_EFLAGS(EFlags);
10893 IEM_MC_ADVANCE_RIP();
10894 IEM_MC_END();
10895 return VINF_SUCCESS;
10896
10897 case IEMMODE_64BIT:
10898 IEM_MC_BEGIN(2, 2);
10899 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10900 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10901 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10902
10903 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10904 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10905 IEM_MC_FETCH_EFLAGS(EFlags);
10906 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10907 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
10908 else
10909 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
10910
10911 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10912 IEM_MC_COMMIT_EFLAGS(EFlags);
10913 IEM_MC_ADVANCE_RIP();
10914 IEM_MC_END();
10915 return VINF_SUCCESS;
10916
10917 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10918 }
10919}
10920
10921
/** Opcode 0xf6 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    /*
     * TEST Eb,Ib: AND the operand with the immediate, updating flags only.
     * The destination is never written, so memory operands are mapped
     * read-only.
     */
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The immediate byte follows the ModR/M bytes, hence cbImm = 1. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10968
10969
/** Opcode 0xf7 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    /*
     * TEST Ev,Iv: AND the operand with the immediate, updating flags only.
     * The destination is never written, so memory operands are mapped
     * read-only.  In 64-bit mode the immediate is 32 bits, sign-extended.
     */
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access.  cbImm passed to CALC_RM_EFF_ADDR covers the
           immediate bytes that still follow the ModR/M encoding. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm is 4: the 64-bit form still encodes a 32-bit immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11109
11110
/** Opcode 0xf6 /4, /5, /6 and /7. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    /*
     * Common worker for the byte-sized MUL/IMUL/DIV/IDIV forms.  The worker
     * operates on AX and returns zero on success; a non-zero return raises
     * \#DE (divide error).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; anything else is a divide error. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; anything else is a divide error. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11162
11163
11164/** Opcode 0xf7 /4, /5, /6 and /7. */
11165FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
11166{
11167 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11168
11169 if (IEM_IS_MODRM_REG_MODE(bRm))
11170 {
11171 /* register access */
11172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11173 switch (pVCpu->iem.s.enmEffOpSize)
11174 {
11175 case IEMMODE_16BIT:
11176 {
11177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11178 IEM_MC_BEGIN(4, 1);
11179 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11180 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11181 IEM_MC_ARG(uint16_t, u16Value, 2);
11182 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11183 IEM_MC_LOCAL(int32_t, rc);
11184
11185 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11186 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11187 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11188 IEM_MC_REF_EFLAGS(pEFlags);
11189 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11190 IEM_MC_IF_LOCAL_IS_Z(rc) {
11191 IEM_MC_ADVANCE_RIP();
11192 } IEM_MC_ELSE() {
11193 IEM_MC_RAISE_DIVIDE_ERROR();
11194 } IEM_MC_ENDIF();
11195
11196 IEM_MC_END();
11197 return VINF_SUCCESS;
11198 }
11199
11200 case IEMMODE_32BIT:
11201 {
11202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11203 IEM_MC_BEGIN(4, 1);
11204 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11205 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11206 IEM_MC_ARG(uint32_t, u32Value, 2);
11207 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11208 IEM_MC_LOCAL(int32_t, rc);
11209
11210 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11211 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11212 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11213 IEM_MC_REF_EFLAGS(pEFlags);
11214 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11215 IEM_MC_IF_LOCAL_IS_Z(rc) {
11216 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11217 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11218 IEM_MC_ADVANCE_RIP();
11219 } IEM_MC_ELSE() {
11220 IEM_MC_RAISE_DIVIDE_ERROR();
11221 } IEM_MC_ENDIF();
11222
11223 IEM_MC_END();
11224 return VINF_SUCCESS;
11225 }
11226
11227 case IEMMODE_64BIT:
11228 {
11229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11230 IEM_MC_BEGIN(4, 1);
11231 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11232 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11233 IEM_MC_ARG(uint64_t, u64Value, 2);
11234 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11235 IEM_MC_LOCAL(int32_t, rc);
11236
11237 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11238 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11239 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11240 IEM_MC_REF_EFLAGS(pEFlags);
11241 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11242 IEM_MC_IF_LOCAL_IS_Z(rc) {
11243 IEM_MC_ADVANCE_RIP();
11244 } IEM_MC_ELSE() {
11245 IEM_MC_RAISE_DIVIDE_ERROR();
11246 } IEM_MC_ENDIF();
11247
11248 IEM_MC_END();
11249 return VINF_SUCCESS;
11250 }
11251
11252 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11253 }
11254 }
11255 else
11256 {
11257 /* memory access. */
11258 switch (pVCpu->iem.s.enmEffOpSize)
11259 {
11260 case IEMMODE_16BIT:
11261 {
11262 IEM_MC_BEGIN(4, 2);
11263 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11264 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11265 IEM_MC_ARG(uint16_t, u16Value, 2);
11266 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11268 IEM_MC_LOCAL(int32_t, rc);
11269
11270 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11272 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11273 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11274 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11275 IEM_MC_REF_EFLAGS(pEFlags);
11276 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11277 IEM_MC_IF_LOCAL_IS_Z(rc) {
11278 IEM_MC_ADVANCE_RIP();
11279 } IEM_MC_ELSE() {
11280 IEM_MC_RAISE_DIVIDE_ERROR();
11281 } IEM_MC_ENDIF();
11282
11283 IEM_MC_END();
11284 return VINF_SUCCESS;
11285 }
11286
11287 case IEMMODE_32BIT:
11288 {
11289 IEM_MC_BEGIN(4, 2);
11290 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11291 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11292 IEM_MC_ARG(uint32_t, u32Value, 2);
11293 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11294 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11295 IEM_MC_LOCAL(int32_t, rc);
11296
11297 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11299 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11300 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11301 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11302 IEM_MC_REF_EFLAGS(pEFlags);
11303 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11304 IEM_MC_IF_LOCAL_IS_Z(rc) {
11305 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11306 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11307 IEM_MC_ADVANCE_RIP();
11308 } IEM_MC_ELSE() {
11309 IEM_MC_RAISE_DIVIDE_ERROR();
11310 } IEM_MC_ENDIF();
11311
11312 IEM_MC_END();
11313 return VINF_SUCCESS;
11314 }
11315
11316 case IEMMODE_64BIT:
11317 {
11318 IEM_MC_BEGIN(4, 2);
11319 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11320 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11321 IEM_MC_ARG(uint64_t, u64Value, 2);
11322 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11323 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11324 IEM_MC_LOCAL(int32_t, rc);
11325
11326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11328 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11329 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11330 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11331 IEM_MC_REF_EFLAGS(pEFlags);
11332 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11333 IEM_MC_IF_LOCAL_IS_Z(rc) {
11334 IEM_MC_ADVANCE_RIP();
11335 } IEM_MC_ELSE() {
11336 IEM_MC_RAISE_DIVIDE_ERROR();
11337 } IEM_MC_ENDIF();
11338
11339 IEM_MC_END();
11340 return VINF_SUCCESS;
11341 }
11342
11343 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11344 }
11345 }
11346}
11347
/**
 * @opcode 0xf6
 *
 * Group 3 byte-sized dispatcher: selects between test/not/neg/mul/imul/div/idiv
 * on the reg field of the ModR/M byte.  /1 is undefined (raises \#UD).
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            /* div/idiv additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11386
11387
/**
 * @opcode 0xf7
 *
 * Group 3 word/dword/qword dispatcher: selects between
 * test/not/neg/mul/imul/div/idiv on the reg field of the ModR/M byte.
 * /1 is undefined (raises \#UD).
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            /* div/idiv additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11426
11427
/**
 * @opcode 0xf8
 *
 * CLC - clear the carry flag (EFLAGS.CF).
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11441
11442
/**
 * @opcode 0xf9
 *
 * STC - set the carry flag (EFLAGS.CF).
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11456
11457
/**
 * @opcode 0xfa
 *
 * CLI - clear the interrupt flag.  Deferred to a C implementation since it
 * involves privilege/VM-mode checks beyond what the MC DSL expresses.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
11467
11468
/**
 * @opcode 0xfb
 *
 * STI - set the interrupt flag.  Deferred to a C implementation (privilege
 * checks and interrupt shadowing), mirroring iemOp_cli above.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11475
11476
/**
 * @opcode 0xfc
 *
 * CLD - clear the direction flag (EFLAGS.DF).
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11490
11491
/**
 * @opcode 0xfd
 *
 * STD - set the direction flag (EFLAGS.DF).
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11505
11506
11507/**
11508 * @opcode 0xfe
11509 */
11510FNIEMOP_DEF(iemOp_Grp4)
11511{
11512 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11513 switch (IEM_GET_MODRM_REG_8(bRm))
11514 {
11515 case 0:
11516 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
11517 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
11518 case 1:
11519 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
11520 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
11521 default:
11522 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
11523 return IEMOP_RAISE_INVALID_OPCODE();
11524 }
11525}
11526
11527
/**
 * Opcode 0xff /2 - near indirect call.
 *
 * Fetches the new RIP from the register or memory operand and performs a
 * near call via the size-specific C implementation (which also pushes the
 * return address).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    /* In 64-bit mode the operand size defaults to 64-bit for branches. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11612
11613typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11614
11615FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11616{
11617 /* Registers? How?? */
11618 if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(bRm)))
11619 { /* likely */ }
11620 else
11621 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11622
11623 /* Far pointer loaded from memory. */
11624 switch (pVCpu->iem.s.enmEffOpSize)
11625 {
11626 case IEMMODE_16BIT:
11627 IEM_MC_BEGIN(3, 1);
11628 IEM_MC_ARG(uint16_t, u16Sel, 0);
11629 IEM_MC_ARG(uint16_t, offSeg, 1);
11630 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11631 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11634 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11635 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11636 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11637 IEM_MC_END();
11638 return VINF_SUCCESS;
11639
11640 case IEMMODE_64BIT:
11641 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
11642 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11643 * and call far qword [rsp] encodings. */
11644 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11645 {
11646 IEM_MC_BEGIN(3, 1);
11647 IEM_MC_ARG(uint16_t, u16Sel, 0);
11648 IEM_MC_ARG(uint64_t, offSeg, 1);
11649 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11651 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11653 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11654 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11655 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11656 IEM_MC_END();
11657 return VINF_SUCCESS;
11658 }
11659 /* AMD falls thru. */
11660 RT_FALL_THRU();
11661
11662 case IEMMODE_32BIT:
11663 IEM_MC_BEGIN(3, 1);
11664 IEM_MC_ARG(uint16_t, u16Sel, 0);
11665 IEM_MC_ARG(uint32_t, offSeg, 1);
11666 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11667 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11668 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11670 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11671 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11672 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11673 IEM_MC_END();
11674 return VINF_SUCCESS;
11675
11676 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11677 }
11678}
11679
11680
/**
 * Opcode 0xff /3 - far indirect call.
 *
 * Thin wrapper delegating to the shared far-branch worker with the
 * far-call C implementation.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
11690
11691
/**
 * Opcode 0xff /4 - near indirect jump.
 *
 * Fetches the new RIP from the register or memory operand and sets it
 * directly (no return address is pushed).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    /* In 64-bit mode the operand size defaults to 64-bit for branches. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11776
11777
/**
 * Opcode 0xff /5 - far indirect jump.
 *
 * Thin wrapper delegating to the shared far-branch worker with the
 * far-jump C implementation.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
11787
11788
/**
 * Opcode 0xff /6 - push Ev.
 *
 * Pushes the register or memory operand onto the stack.  Register operands
 * reuse the common push-GReg worker; memory operands are handled inline.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    /* In 64-bit mode the push operand size defaults to 64-bit. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11844
11845
/**
 * @opcode 0xff
 *
 * Group 5 dispatcher: inc/dec/calln/callf/jmpn/jmpf/push on the reg field
 * of the ModR/M byte; /7 is undefined (raises \#UD).
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* Unreachable: the reg field is only 3 bits wide (0..7, all handled). */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
11876
11877
11878
/**
 * The one-byte opcode decoder dispatch table, indexed by opcode byte.
 * Declared extern at the top of this file; one entry per opcode 0x00-0xff.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
11946
11947
11948/** @} */
11949
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette