VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 97405

Last change on this file since 97405 was 97370, checked in by vboxsync, 2 years ago

VMM/IEM: iemRegAddToRipAndClearRF -> iemRegUpdateRipAndFinishClearingRF and made callers use the return code. bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 395.4 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 97370 2022-11-02 00:53:30Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
/** The one-byte opcode dispatch table, indexed by opcode byte.
 *  Defined later in this file; declared extern (not static) here so the
 *  prefix handlers above the table definition can reference it. */
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/* Instruction specification format - work in progress: */
60
/**
 * @opcode      0x00
 * @opmnemonic  add
 * @op1         rm:Eb
 * @op2         reg:Gb
 * @opmaps      one
 * @openc       ModR/M
 * @opflmodify  cf,pf,af,zf,sf,of
 * @ophints     harmless ignores_op_sizes
 * @opstats     add_Eb_Gb
 * @opgroup     og_gen_arith_bin
 * @optest              op1=1   op2=1   -> op1=2   efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl|=cf op1=1   op2=2   -> op1=3   efl&|=nc,po,na,nz,pl,nv
 * @optest              op1=254 op2=1   -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest              op1=128 op2=128 -> op1=0   efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD Eb,Gb: defer to the common ModR/M byte binary-op decoder with the ADD worker table. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
82
83
/**
 * @opcode      0x01
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest               op1=1  op2=1  -> op1=2  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl|=cf  op1=2  op2=2  -> op1=4  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl&~=cf op1=-1 op2=1  -> op1=0  efl&|=cf,po,af,zf,pl,nv
 * @optest               op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD Ev,Gv: word/dword/qword variant, same shared decoder, ADD worker table. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
98
99
/**
 * @opcode      0x02
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD Gb,Eb: register-destination byte form (no LOCK, destination is a register). */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
111
112
/**
 * @opcode      0x03
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD Gv,Ev: register-destination word/dword/qword form. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
124
125
/**
 * @opcode      0x04
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL,Ib: fixed 8-bit accumulator form; operand-size prefixes are irrelevant. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
137
138
/**
 * @opcode      0x05
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest               op1=1  op2=1  -> op1=2  efl&|=nv,pl,nz,na,pe
 * @optest      efl|=cf  op1=2  op2=2  -> op1=4  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl&~=cf op1=-1 op2=1  -> op1=0  efl&|=cf,po,af,zf,pl,nv
 * @optest               op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX,Iz: accumulator form; operand size selects AX/EAX/RAX and the immediate width. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
153
154
/**
 * @opcode      0x06
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES: invalid in 64-bit mode; shared segment-register push helper does the work. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
165
166
/**
 * @opcode      0x07
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES: invalid in 64-bit mode; segment loading may fault, so defer to the C implementation. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
178
179
/**
 * @opcode      0x08
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest                 op1=7    op2=12   -> op1=15   efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf op1=0    op2=0    -> op1=0    efl&|=nc,po,na,zf,pl,nv
 * @optest                 op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest                 op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR Eb,Gb: AF is architecturally undefined after OR, hence the verification hint. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
197
198
/**
 * @opcode      0x09
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest      efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest      op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR Ev,Gv.  Note: comment opener fixed from '/ *' to doxygen '/ **' so the
       @opcode/@optest tags are picked up like in every sibling handler. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}
218
219
220/**
221 * @opcode 0x0a
222 * @opgroup og_gen_arith_bin
223 * @opflmodify cf,pf,af,zf,sf,of
224 * @opflundef af
225 * @opflclear of,cf
226 * @opcopytests iemOp_or_Eb_Gb
227 */
228FNIEMOP_DEF(iemOp_or_Gb_Eb)
229{
230 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
231 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
232 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
233}
234
235
/**
 * @opcode      0x0b
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR Gv,Ev: register-destination word/dword/qword form. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
250
251
/**
 * @opcode      0x0c
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL,Ib: fixed 8-bit accumulator form. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
266
267
/**
 * @opcode      0x0d
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest      efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest      op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX,Iz: accumulator form; Iz is sign-extended to 64 bits for o64. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
288
289
/**
 * @opcode      0x0e
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS: invalid in 64-bit mode; flagged potentially dangerous for the disassembler. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
300
301
302/**
303 * @opcode 0x0f
304 * @opmnemonic EscTwo0f
305 * @openc two0f
306 * @opdisenum OP_2B_ESC
307 * @ophints harmless
308 * @opgroup og_escapes
309 */
310FNIEMOP_DEF(iemOp_2byteEscape)
311{
312#ifdef VBOX_STRICT
313 /* Sanity check the table the first time around. */
314 static bool s_fTested = false;
315 if (RT_LIKELY(s_fTested)) { /* likely */ }
316 else
317 {
318 s_fTested = true;
319 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
320 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
321 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
322 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
323 }
324#endif
325
326 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
327 {
328 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
329 IEMOP_HLP_MIN_286();
330 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
331 }
332 /* @opdone */
333
334 /*
335 * On the 8086 this is a POP CS instruction.
336 * For the time being we don't specify this this.
337 */
338 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
339 IEMOP_HLP_NO_64BIT();
340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
341 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
342}
343
/**
 * @opcode      0x10
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest      op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest      op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest      op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest      op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest      op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC Eb,Gb: add with carry-in (CF consumed), byte ModR/M form. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
360
361
/**
 * @opcode      0x11
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest      op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest      op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest      op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest      op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest      op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev,Gv: add with carry-in, word/dword/qword ModR/M form. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
378
379
/**
 * @opcode      0x12
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC Gb,Eb: register-destination byte form. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
392
393
/**
 * @opcode      0x13
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv,Ev: register-destination word/dword/qword form. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
406
407
/**
 * @opcode      0x14
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL,Ib: fixed 8-bit accumulator form. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
420
421
/**
 * @opcode      0x15
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX,Iz: accumulator form; operand size selects AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
434
435
/**
 * @opcode      0x16
 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS: invalid in 64-bit mode; shared segment-register push helper does the work. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
445
446
/**
 * @opcode      0x17
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS: invalid in 64-bit mode.  DISOPTYPE_INHIBIT_IRQS: interrupts are
       inhibited for one instruction after loading SS.
       NOTE(review): the @opgroup/@opfltest/@opflmodify tags above look
       copy-pasted from the SBB handlers - POP SS does not touch EFLAGS and the
       other segment pops use og_stack_sreg; confirm before relying on them. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
460
461
/**
 * @opcode      0x18
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB Eb,Gb: subtract with borrow-in (CF consumed), byte ModR/M form. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
473
474
/**
 * @opcode      0x19
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev,Gv: word/dword/qword ModR/M form. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
486
487
/**
 * @opcode      0x1a
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB Gb,Eb: register-destination byte form. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
499
500
/**
 * @opcode      0x1b
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv,Ev: register-destination word/dword/qword form. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
512
513
/**
 * @opcode      0x1c
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL,Ib: fixed 8-bit accumulator form. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
525
526
/**
 * @opcode      0x1d
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX,Iz: accumulator form; operand size selects AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
538
539
/**
 * @opcode      0x1e
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS: invalid in 64-bit mode; shared segment-register push helper does the work. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
550
551
/**
 * @opcode      0x1f
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS: invalid in 64-bit mode; deferred to the C implementation as it may fault. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
563
564
/**
 * @opcode      0x20
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND Eb,Gb: AF is architecturally undefined after AND, hence the verification hint. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
578
579
/**
 * @opcode      0x21
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND Ev,Gv: word/dword/qword ModR/M form. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
593
594
/**
 * @opcode      0x22
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND Gb,Eb: register-destination byte form. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
608
609
/**
 * @opcode      0x23
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND Gv,Ev: register-destination word/dword/qword form. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
623
624
625/**
626 * @opcode 0x24
627 * @opgroup og_gen_arith_bin
628 * @opflmodify cf,pf,af,zf,sf,of
629 * @opflundef af
630 * @opflclear of,cf
631 */
632FNIEMOP_DEF(iemOp_and_Al_Ib)
633{
634 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
635 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
636 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
637}
638
639
/**
 * @opcode      0x25
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX,Iz: accumulator form; operand size selects AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
653
654
/**
 * @opcode      0x26
 * @opmnemonic  SEG
 * @op1         ES
 * @opgroup     og_prefix
 * @openc       prefix
 * @opdisenum   OP_SEG
 * @ophints     harmless
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it and set the effective segment,
       then fetch and dispatch the instruction byte that follows. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
673
674
/**
 * @opcode      0x27
 * @opfltest    af,cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   of
 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA: decimal-adjust AL after addition; invalid in 64-bit mode,
       OF is architecturally undefined.  Deferred to the C implementation. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
689
690
/**
 * @opcode      0x28
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB Eb,Gb: byte ModR/M form, SUB worker table. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
701
702
/**
 * @opcode      0x29
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB Ev,Gv: word/dword/qword ModR/M form. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
713
714
/**
 * @opcode      0x2a
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB Gb,Eb: register-destination byte form. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
725
726
/**
 * @opcode      0x2b
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB Gv,Ev: register-destination word/dword/qword form. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
737
738
/**
 * @opcode      0x2c
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL,Ib: fixed 8-bit accumulator form. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
749
750
/**
 * @opcode      0x2d
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX,Iz: accumulator form; operand size selects AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
761
762
/**
 * @opcode      0x2e
 * @opmnemonic  SEG
 * @op1         CS
 * @opgroup     og_prefix
 * @openc       prefix
 * @opdisenum   OP_SEG
 * @ophints     harmless
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it and set the effective segment,
       then fetch and dispatch the instruction byte that follows. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
781
782
/**
 * @opcode      0x2f
 * @opfltest    af,cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   of
 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS: decimal-adjust AL after subtraction; invalid in 64-bit mode,
       OF is architecturally undefined.  Deferred to the C implementation. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
797
798
/**
 * @opcode      0x30
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR Eb,Gb: AF is architecturally undefined after XOR, hence the verification hint. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
812
813
/**
 * @opcode      0x31
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR Ev,Gv: word/dword/qword ModR/M form. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
827
828
/**
 * @opcode      0x32
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR Gb,Eb: register-destination byte form. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
842
843
/**
 * @opcode      0x33
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR Gv,Ev: register-destination word/dword/qword form. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
857
858
/**
 * @opcode      0x34
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL,Ib: fixed 8-bit accumulator form. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
872
873
874/**
875 * @opcode 0x35
876 * @opgroup og_gen_arith_bin
877 * @opflmodify cf,pf,af,zf,sf,of
878 * @opflundef af
879 * @opflclear of,cf
880 */
881FNIEMOP_DEF(iemOp_xor_eAX_Iz)
882{
883 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
884 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
885 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
886}
887
888
/**
 * @opcode      0x36
 * @opmnemonic  SEG
 * @op1         SS
 * @opgroup     og_prefix
 * @openc       prefix
 * @opdisenum   OP_SEG
 * @ophints     harmless
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it and set the effective segment,
       then fetch and dispatch the instruction byte that follows. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
907
908
/**
 * @opcode      0x37
 * @opfltest    af,cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   pf,zf,sf,of
 * @opgroup     og_gen_arith_dec
 * @optest      efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
 * @optest      efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
 * @optest      intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
 * @optest      amd   / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
 * @optest      efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
 * @optest      efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
 * @optest      intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
 * @optest      amd   / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
 * @optest      intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
 * @optest      amd   / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
 * @optest      intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
 * @optest      amd   / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
 * @optest      intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
 * @optest      amd   / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
 * @optest      intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest      amd   / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest      intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest      amd   / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest      intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
 * @optest      amd   / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
 * @optest      intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
 * @optest      amd   / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
 * @optest      intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
 * @optest      amd   / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
 * @optest      intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
 * @optest      amd   / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
 * @optest      intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
 * @optest      amd   / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
 * @optest      intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
 * @optest      amd   / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA: ASCII-adjust AL after addition; invalid in 64-bit mode.  Intel and
       AMD differ in the flag/AX results (see the vendor-split tests above);
       the C implementation handles the details. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
}
956
957
/**
 * @opcode      0x38
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP Eb,Gb: subtract-and-discard; shares the byte ModR/M binary-op decoder. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
966
967
/**
 * @opcode      0x39
 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP Ev,Gv: word/dword/qword ModR/M form. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
976
977
/**
 * @opcode      0x3a
 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP Gb,Eb: register-first byte form. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
986
987
/**
 * @opcode      0x3b
 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP Gv,Ev: register-first word/dword/qword form. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
996
997
/**
 * @opcode      0x3c
 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL,Ib: fixed 8-bit accumulator form. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
1006
1007
/**
 * @opcode      0x3d
 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX,Iz: accumulator form; operand size selects AX/EAX/RAX. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
1016
1017
/**
 * @opcode      0x3e
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it and set the effective segment,
       then fetch and dispatch the instruction byte that follows. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1030
1031
1032/**
1033 * @opcode 0x3f
1034 * @opfltest af,cf
1035 * @opflmodify cf,pf,af,zf,sf,of
1036 * @opflundef pf,zf,sf,of
1037 * @opgroup og_gen_arith_dec
1038 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1039 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1040 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1041 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1042 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1043 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1044 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1045 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1046 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1047 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1048 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1049 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1050 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1051 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1052 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1053 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1054 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1055 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1056 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1057 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1058 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1059 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1060 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1061 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1062 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1063 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1064 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1065 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1066 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1067 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1068 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1069 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1070 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1071 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1072 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1073 */
1074FNIEMOP_DEF(iemOp_aas)
1075{
1076 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
1077 IEMOP_HLP_NO_64BIT();
1078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1079 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1080
1081 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
1082}
1083
1084
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Dispatches on the effective operand size and invokes the size-specific
 * assembly worker from @a pImpl on the register @a iReg by reference,
 * passing EFLAGS by reference as well.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit GPR writes zero the upper half in 64-bit mode. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1130
1131
1132/**
1133 * @opcode 0x40
1134 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* Plain REX (no R/X/B/W bits): record it and dispatch the next opcode byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
1152
1153
1154/**
1155 * @opcode 0x41
1156 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.B: extends the ModRM r/m / opcode-embedded register field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
1175
1176
1177/**
1178 * @opcode 0x42
1179 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.X: extends the SIB index register field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
1198
1199
1200
1201/**
1202 * @opcode 0x43
1203 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BX: extends both the r/m (B) and SIB index (X) register fields. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
1223
1224
1225/**
1226 * @opcode 0x44
1227 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.R: extends the ModRM reg field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
1246
1247
1248/**
1249 * @opcode 0x45
1250 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RB: extends the reg (R) and r/m (B) register fields. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
1270
1271
1272/**
1273 * @opcode 0x46
1274 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RX: extends the reg (R) and SIB index (X) register fields. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
1294
1295
1296/**
1297 * @opcode 0x47
1298 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBX: extends the reg (R), r/m (B) and SIB index (X) register fields. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
1319
1320
1321/**
1322 * @opcode 0x48
1323 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.W: 64-bit operand size; recalc effective operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
1342
1343
1344/**
1345 * @opcode 0x49
1346 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BW: r/m extension plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
1366
1367
1368/**
1369 * @opcode 0x4a
1370 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.XW: SIB index extension plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
1390
1391
1392/**
1393 * @opcode 0x4b
1394 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BXW: r/m and SIB index extension plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
1415
1416
1417/**
1418 * @opcode 0x4c
1419 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RW: reg extension plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
1439
1440
1441/**
1442 * @opcode 0x4d
1443 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBW: reg and r/m extension plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
1464
1465
1466/**
1467 * @opcode 0x4e
1468 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RXW: reg and SIB index extension plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
1489
1490
1491/**
1492 * @opcode 0x4f
1493 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBXW: all register-extension bits plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
1515
1516
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the register index is extended with REX.B and the default
 * operand size is forced to 64-bit (a 0x66 prefix selects 16-bit; there is
 * no 32-bit push in long mode).  Fetches the register value and pushes it
 * with the effective operand size.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1562
1563
1564/**
1565 * @opcode 0x50
1566 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX); /* shared push-register worker */
}
1572
1573
1574/**
1575 * @opcode 0x51
1576 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX); /* shared push-register worker */
}
1582
1583
1584/**
1585 * @opcode 0x52
1586 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX); /* shared push-register worker */
}
1592
1593
1594/**
1595 * @opcode 0x53
1596 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX); /* shared push-register worker */
}
1602
1603
1604/**
1605 * @opcode 0x54
1606 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        /* The 8086/8088 pushes the value of SP *after* the decrement, i.e.
           SP - 2; later CPUs push the pre-decrement value (common path below). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
1622
1623
1624/**
1625 * @opcode 0x55
1626 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP); /* shared push-register worker */
}
1632
1633
1634/**
1635 * @opcode 0x56
1636 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI); /* shared push-register worker */
}
1642
1643
1644/**
1645 * @opcode 0x57
1646 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI); /* shared push-register worker */
}
1652
1653
/**
 * Common 'pop register' helper.
 *
 * In 64-bit mode the register index is extended with REX.B and the default
 * operand size is forced to 64-bit (a 0x66 prefix selects 16-bit; there is
 * no 32-bit pop in long mode).  Pops into the register by reference with
 * the effective operand size.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1700
1701
1702/**
1703 * @opcode 0x58
1704 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX); /* shared pop-register worker */
}
1710
1711
1712/**
1713 * @opcode 0x59
1714 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX); /* shared pop-register worker */
}
1720
1721
1722/**
1723 * @opcode 0x5a
1724 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX); /* shared pop-register worker */
}
1730
1731
1732/**
1733 * @opcode 0x5b
1734 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX); /* shared pop-register worker */
}
1740
1741
1742/**
1743 * @opcode 0x5c
1744 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    /* POP rSP is special: the popped value replaces SP itself, so it cannot
       go through the common by-reference worker (the pop adjusts SP too). */
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this is pop r12, which the common worker handles fine. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1791
1792
1793/**
1794 * @opcode 0x5d
1795 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP); /* shared pop-register worker */
}
1801
1802
1803/**
1804 * @opcode 0x5e
1805 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI); /* shared pop-register worker */
}
1811
1812
1813/**
1814 * @opcode 0x5f
1815 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI); /* shared pop-register worker */
}
1821
1822
1823/**
1824 * @opcode 0x60
1825 */
FNIEMOP_DEF(iemOp_pusha)
{
    /* PUSHA/PUSHAD - push all general registers; 186+ only, invalid in 64-bit
       mode.  Deferred to the operand-size specific C implementation. */
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1836
1837
1838/**
1839 * @opcode 0x61
1840 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* 0x61 is POPA/POPAD outside 64-bit mode; in 64-bit mode it would be the
       (unsupported) MVEX prefix of Knights Corner, which we reject. */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1857
1858
1859/**
1860 * @opcode 0x62
1861 * @opmnemonic bound
1862 * @op1 Gv_RO
1863 * @op2 Ma
1864 * @opmincpu 80186
1865 * @ophints harmless invalid_64
1866 * @optest op1=0 op2=0 ->
1867 * @optest op1=1 op2=0 -> value.xcpt=5
1868 * @optest o16 / op1=0xffff op2=0x0000fffe ->
1869 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
1870 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
1871 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
1872 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
1873 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
1874 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
1875 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
1876 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
1877 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
1878 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
1879 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
1880 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
1881 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
1882 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
1883 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
1884 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
1885 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
1886 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
1887 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
1888 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
1889 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
1890 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
1891 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
1892 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
1893 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
1894 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
1895 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
1896 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
1897 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
1898 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
1899 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
1900 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
1901 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
1902 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
1903 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
1904 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
1905 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
1906 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
1907 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
1908 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
1909 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
1910 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved.  Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                /* 16-bit: index in Gv, bounds are two consecutive words at Ma. */
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                /* 32-bit: index in Gv, bounds are two consecutive dwords at Ma. */
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MODRM.MOD == 3 here, i.e. a potential EVEX prefix in 32-bit code. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX decoding: consume the remaining two payload bytes, then bail. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1998
1999
2000/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    /* ARPL Ew,Gw - adjust RPL of the selector in Ew to be at least that of Gw.
       286+ protected mode only; the worker iemAImpl_arpl updates ZF. */
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory: map the destination word read/write and commit it after the worker. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2047
2048
2049/**
2050 * @opcode 0x63
2051 *
2052 * @note This is a weird one. It works like a regular move instruction if
2053 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2054 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the REX.W form (sign-extend 32-bit source into 64-bit Gv) is
       implemented; the non-REX.W variants assert below (see @note above). */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2096
2097
/**
 * @opcode 0x64
 * @opmnemonic segfs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and make FS the effective segment for the instruction. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;

    /* Fetch and dispatch the next opcode byte with the prefix in effect. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2115
2116
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and make GS the effective segment for the instruction. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_GS;

    /* Fetch and dispatch the next opcode byte with the prefix in effect. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2134
2135
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and recalculate the effective operand size. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present (those already selected index 2/3). */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    /* Fetch and dispatch the next opcode byte with the prefix in effect. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2160
2161
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and toggle the effective address size: 16<->32 in
       legacy/compat modes, 64->32 in long mode. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    /* Fetch and dispatch the next opcode byte with the prefix in effect. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2187
2188
/**
 * @opcode 0x68
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    /* push imm16/imm32/imm32-sign-extended-to-64, by effective operand size.
       NOTE(review): the 'return VINF_SUCCESS;' statements after IEM_MC_END()
       appear unreachable now that IEM_MC_ADVANCE_RIP_AND_FINISH() returns the
       status itself (cf. the r97370 FINISH-macro conversion) - candidates for
       cleanup; verify against the MC macro definitions. */
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit push still only has a 32-bit immediate, sign extended. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2235
2236
/**
 * @opcode 0x69
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    /* Three-operand signed multiply: Gv = Ev * Iz.  The result goes through a
       local temporary so the destination register is only written on success.
       NOTE(review): the 'return VINF_SUCCESS;' after IEM_MC_END() appears
       unreachable since IEM_MC_ADVANCE_RIP_AND_FINISH() returns - candidate
       for cleanup; verify against the MC macro definitions. */
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The effective address must be calculated before fetching the
                   immediate; the 3rd parameter is the number of immediate bytes
                   still to come (matters for RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                /* Iz is at most 32 bits; sign extended to 64 for the 64-bit form. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2405
2406
/**
 * @opcode 0x6a
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    /* push imm8, sign extended to the effective operand size. */
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        /* The int8_t argument is sign extended by the implicit conversion to
           the push macro's operand width. */
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2435
2436
/**
 * @opcode 0x6b
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    /* Three-operand signed multiply with a sign-extended byte immediate:
       Gv = Ev * Ib.  Same structure as the 0x69 (Iz) form.
       NOTE(review): the 'return VINF_SUCCESS;' after each case appears
       unreachable since IEM_MC_ADVANCE_RIP_AND_FINISH() returns - candidate
       for cleanup; verify against the MC macro definitions. */
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first; the 3rd parameter (1) is the number of
                   immediate bytes still to come (for RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1); /* sign extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
2600
2601
/**
 * @opcode 0x6c
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    /* ins Yb,DX - input byte(s) from port DX to ES:[e/rDI].  Deferred to the
       C implementation; the addressing mode selects the variant.  REPNZ is
       treated the same as REPZ here (both bits are tested together).
       NOTE(review): the 'false' argument presumably means "I/O permission not
       yet checked" - confirm against the iemCImpl_ins_* signatures. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2632
2633
/**
 * @opcode 0x6d
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    /* ins Yv,DX - input word/dword(s) from port DX to ES:[e/rDI].  The
       variant is selected by operand size x address size; a 64-bit operand
       size is handled as 32-bit (no 64-bit port I/O), hence the shared
       IEMMODE_64BIT/IEMMODE_32BIT case labels. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2696
2697
/**
 * @opcode 0x6e
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    /* outs DX,Yb - output byte(s) from DS:[e/rSI] (segment overridable, hence
       the iEffSeg argument) to port DX.  REPNZ is treated the same as REPZ. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2728
2729
/**
 * @opcode 0x6f
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    /* outs DX,Yv - output word/dword(s) from DS:[e/rSI] (segment overridable)
       to port DX.  As with 0x6d, a 64-bit operand size is handled as 32-bit
       (no 64-bit port I/O), hence the shared case labels. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2792
2793
/**
 * @opcode 0x70
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    /* jo rel8: jump short if OF=1, otherwise fall through. */
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
2812
2813
/**
 * @opcode 0x71
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    /* jno rel8: jump short if OF=0.  Note the positive flag is tested, so the
       taken branch is in the ELSE arm. */
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
2832
/**
 * @opcode 0x72
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    /* jc/jb/jnae rel8: jump short if CF=1. */
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
2851
2852
/**
 * @opcode 0x73
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    /* jnc/jnb/jae rel8: jump short if CF=0 (taken branch is the ELSE arm). */
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
2871
2872
/**
 * @opcode 0x74
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    /* je/jz rel8: jump short if ZF=1. */
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
2891
2892
/**
 * @opcode 0x75
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    /* jne/jnz rel8: jump short if ZF=0 (taken branch is the ELSE arm). */
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
2911
2912
/**
 * @opcode 0x76
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    /* jbe/jna rel8: jump short if CF=1 or ZF=1. */
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
2931
2932
/**
 * @opcode 0x77
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    /* ja/jnbe rel8: jump short if CF=0 and ZF=0 (taken branch is the ELSE arm). */
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
2951
2952
/**
 * @opcode 0x78
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    /* js rel8: jump short if SF=1. */
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
2971
2972
/**
 * @opcode 0x79
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    /* jns rel8: jump short if SF=0 (taken branch is the ELSE arm). */
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
2991
2992
/**
 * @opcode 0x7a
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    /* jp/jpe rel8: jump short if PF=1. */
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3011
3012
/**
 * @opcode 0x7b
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    /* jnp/jpo rel8: jump short if PF=0 (taken branch is the ELSE arm). */
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3031
3032
/**
 * @opcode 0x7c
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    /* jl/jnge rel8: jump short if SF != OF. */
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3051
3052
/**
 * @opcode 0x7d
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* jnl/jge rel8: jump short if SF == OF (taken branch is the ELSE arm). */
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3071
3072
/**
 * @opcode 0x7e
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    /* jle/jng rel8: jump short if ZF=1 or SF != OF. */
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3091
3092
/**
 * @opcode 0x7f
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* jg/jnle rel8: jump short if ZF=0 and SF == OF (taken branch is the ELSE arm). */
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3111
3112
/**
 * @opcode 0x80
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /* Group 1 byte ops with byte immediate: the ModR/M reg field selects the
       operation (add/or/adc/sbb/and/sub/xor/cmp) via the g_apIemImplGrp1
       function table. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* CMP has no locked variant (it only reads the destination), so a
           NULL pfnLockedU8 doubles as the "read-only access" indicator. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Effective address before the trailing byte immediate (3rd parameter
           = 1 immediate byte still to come, for RIP-relative addressing). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Dispatch to the locked worker when a LOCK prefix is present. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3183
3184
/**
 * @opcode 0x81
 *
 * Group 1, word/dword/qword operands with a full-size immediate (Iz):
 * ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Iz selected by the ModR/M /reg field.
 * In 64-bit operand size the immediate is a sign-extended 32-bit value.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    /* Worker function table entry (normal + locked variants) for the selected op. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                /* CMP has no locked worker and only reads its destination. */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* Two immediate bytes follow the ModR/M operand bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is invalid with CMP. */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* Writing a 32-bit GREG zeroes bits 63:32; skip for CMP which doesn't write. */
                if (pImpl != &g_iemAImpl_cmp) /* TEST won't get here, no need to check for it. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                /* CMP has no locked worker and only reads its destination. */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* Four immediate bytes follow the ModR/M operand bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is invalid with CMP. */
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register target */
                /* The immediate is 32 bits, sign-extended to 64. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                /* CMP has no locked worker and only reads its destination. */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* Four immediate bytes follow, sign-extended to 64 bits. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is invalid with CMP. */
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3376
3377
/**
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup og_groups
 *
 * Alias of opcode 0x80 (group 1 Eb,Ib) that is only valid outside 64-bit
 * mode; in 64-bit mode it raises \#UD, otherwise it forwards to the 0x80
 * handler.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
3388
3389
/**
 * @opcode 0x83
 *
 * Group 1, word/dword/qword operands with a sign-extended byte immediate:
 * ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Ib selected by the ModR/M /reg field.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                        0);
                /* The byte immediate is sign-extended to the operand size. */
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,    1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                        2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                        0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,    1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                        2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* Writing a 32-bit GREG zeroes bits 63:32; skip for CMP which doesn't write. */
                if (pImpl != &g_iemAImpl_cmp) /* TEST won't get here, no need to check for it. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                        0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,    1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                        2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* CMP has no locked worker and only reads its destination; all others are RMW.
           (Checking the 16-bit member is fine; the locked workers exist for all sizes.) */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* One immediate byte follows the ModR/M operand bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is invalid with CMP. */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is invalid with CMP. */
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is invalid with CMP. */
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3578
3579
/**
 * @opcode 0x84
 *
 * TEST Eb,Gb - ANDs the operands, sets flags, discards the result.
 * Delegates to the generic byte "reg,r/m" binary-operator helper with the
 * TEST worker table; AF is left undefined per the architecture.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
3589
3590
/**
 * @opcode 0x85
 *
 * TEST Ev,Gv - ANDs the operands, sets flags, discards the result.
 * Delegates to the generic word/dword/qword "reg,r/m" binary-operator helper
 * with the TEST worker table; AF is left undefined per the architecture.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
3600
3601
/**
 * @opcode 0x86
 *
 * XCHG Eb,Gb - exchanges a byte register with a byte register or memory
 * operand.  The memory form uses the locked worker by default (XCHG with
 * memory is implicitly locked) unless fDisregardLock is set.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Plain register swap through two temporaries. */
        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,  pu8Mem,           0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,           1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* XCHG with memory is implicitly locked unless the lock-disregard tweak is on. */
        if (!pVCpu->iem.s.fDisregardLock)
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
        else
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3653
3654
3655/**
3656 * @opcode 0x87
3657 */
3658FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
3659{
3660 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
3661 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3662
3663 /*
3664 * If rm is denoting a register, no more instruction bytes.
3665 */
3666 if (IEM_IS_MODRM_REG_MODE(bRm))
3667 {
3668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3669
3670 switch (pVCpu->iem.s.enmEffOpSize)
3671 {
3672 case IEMMODE_16BIT:
3673 IEM_MC_BEGIN(0, 2);
3674 IEM_MC_LOCAL(uint16_t, uTmp1);
3675 IEM_MC_LOCAL(uint16_t, uTmp2);
3676
3677 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3678 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3679 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3680 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3681
3682 IEM_MC_ADVANCE_RIP_AND_FINISH();
3683 IEM_MC_END();
3684 return VINF_SUCCESS;
3685
3686 case IEMMODE_32BIT:
3687 IEM_MC_BEGIN(0, 2);
3688 IEM_MC_LOCAL(uint32_t, uTmp1);
3689 IEM_MC_LOCAL(uint32_t, uTmp2);
3690
3691 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3692 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3693 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3694 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3695
3696 IEM_MC_ADVANCE_RIP_AND_FINISH();
3697 IEM_MC_END();
3698 return VINF_SUCCESS;
3699
3700 case IEMMODE_64BIT:
3701 IEM_MC_BEGIN(0, 2);
3702 IEM_MC_LOCAL(uint64_t, uTmp1);
3703 IEM_MC_LOCAL(uint64_t, uTmp2);
3704
3705 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3706 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3707 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3708 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3709
3710 IEM_MC_ADVANCE_RIP_AND_FINISH();
3711 IEM_MC_END();
3712 return VINF_SUCCESS;
3713
3714 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3715 }
3716 }
3717 else
3718 {
3719 /*
3720 * We're accessing memory.
3721 */
3722 switch (pVCpu->iem.s.enmEffOpSize)
3723 {
3724/** @todo the register must be committed separately! */
3725 case IEMMODE_16BIT:
3726 IEM_MC_BEGIN(2, 2);
3727 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
3728 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
3729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3730
3731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3732 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3733 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
3734 if (!pVCpu->iem.s.fDisregardLock)
3735 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
3736 else
3737 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
3738 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
3739
3740 IEM_MC_ADVANCE_RIP_AND_FINISH();
3741 IEM_MC_END();
3742 return VINF_SUCCESS;
3743
3744 case IEMMODE_32BIT:
3745 IEM_MC_BEGIN(2, 2);
3746 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
3747 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
3748 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3749
3750 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3751 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3752 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
3753 if (!pVCpu->iem.s.fDisregardLock)
3754 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
3755 else
3756 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
3757 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
3758
3759 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
3760 IEM_MC_ADVANCE_RIP_AND_FINISH();
3761 IEM_MC_END();
3762 return VINF_SUCCESS;
3763
3764 case IEMMODE_64BIT:
3765 IEM_MC_BEGIN(2, 2);
3766 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
3767 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
3768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3769
3770 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3771 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3772 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
3773 if (!pVCpu->iem.s.fDisregardLock)
3774 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
3775 else
3776 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
3777 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
3778
3779 IEM_MC_ADVANCE_RIP_AND_FINISH();
3780 IEM_MC_END();
3781 return VINF_SUCCESS;
3782
3783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3784 }
3785 }
3786}
3787
3788
/**
 * @opcode 0x88
 *
 * MOV Eb,Gb - stores a byte register into a byte register or memory operand.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3828
3829
/**
 * @opcode 0x89
 *
 * MOV Ev,Gv - stores a word/dword/qword register into a register or memory
 * operand, sized by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3924
3925
/**
 * @opcode 0x8a
 *
 * MOV Gb,Eb - loads a byte register from a byte register or memory operand.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3964
3965
/**
 * @opcode 0x8b
 *
 * MOV Gv,Ev - loads a word/dword/qword register from a register or memory
 * operand, sized by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4060
4061
4062/**
4063 * opcode 0x63
4064 * @todo Table fixme
4065 */
4066FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
4067{
4068 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
4069 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
4070 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
4071 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
4072 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
4073}
4074
4075
/**
 * @opcode 0x8c
 *
 * MOV Ev,Sw - stores a segment selector into a general register (operand
 * sized, upper bits cleared) or into memory (always a 16-bit store).
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if ( iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                /* Selector zero-extended to the operand size. */
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4152
4153
4154
4155
/**
 * @opcode 0x8d
 *
 * LEA Gv,M - stores the effective address of the memory operand into a
 * general register, truncated to the effective operand size.  The register
 * form (mod=3) is invalid and raises \#UD.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Truncate the effective address to the 16-bit operand size. */
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4205
4206
4207/**
4208 * @opcode 0x8e
4209 */
4210FNIEMOP_DEF(iemOp_mov_Sw_Ev)
4211{
4212 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
4213
4214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4215
4216 /*
4217 * The practical operand size is 16-bit.
4218 */
4219#if 0 /* not necessary */
4220 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
4221#endif
4222
4223 /*
4224 * Check that the destination register exists and can be used with this
4225 * instruction. The REX.R prefix is ignored.
4226 */
4227 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
4228 if ( iSegReg == X86_SREG_CS
4229 || iSegReg > X86_SREG_GS)
4230 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
4231
4232 /*
4233 * If rm is denoting a register, no more instruction bytes.
4234 */
4235 if (IEM_IS_MODRM_REG_MODE(bRm))
4236 {
4237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4238 IEM_MC_BEGIN(2, 0);
4239 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
4240 IEM_MC_ARG(uint16_t, u16Value, 1);
4241 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4242 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
4243 IEM_MC_END();
4244 }
4245 else
4246 {
4247 /*
4248 * We're loading the register from memory. The access is word sized
4249 * regardless of operand size prefixes.
4250 */
4251 IEM_MC_BEGIN(2, 1);
4252 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
4253 IEM_MC_ARG(uint16_t, u16Value, 1);
4254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4255 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4257 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4258 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
4259 IEM_MC_END();
4260 }
4261 return VINF_SUCCESS;
4262}
4263
4264
/** Opcode 0x8f /0.
 *
 * POP Ev - pop a word/dword/qword off the stack into a general register or
 * into memory.  The memory form is special: Intel specifies that rSP is
 * incremented *before* it is used in the effective address calculation, so
 * this is done interpreter-style here rather than via IEM_MC_* micro-ops.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP.  The last argument to the
       CalcRmEffAddrEx helper is the rSP displacement (operand size). */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl.  A temporary rSP copy is
       used so nothing is committed unless both the pop and the store work. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit the new rSP only now that everything succeeded. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4358
4359
4360/**
4361 * @opcode 0x8f
4362 */
4363FNIEMOP_DEF(iemOp_Grp1A__xop)
4364{
4365 /*
4366 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
4367 * three byte VEX prefix, except that the mmmmm field cannot have the values
4368 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
4369 */
4370 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4371 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
4372 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
4373
4374 IEMOP_MNEMONIC(xop, "xop");
4375 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
4376 {
4377 /** @todo Test when exctly the XOP conformance checks kick in during
4378 * instruction decoding and fetching (using \#PF). */
4379 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
4380 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
4381 if ( ( pVCpu->iem.s.fPrefixes
4382 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
4383 == 0)
4384 {
4385 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
4386 if ((bXop2 & 0x80 /* XOP.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
4387 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
4388 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
4389 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
4390 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
4391 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
4392 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
4393 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
4394
4395 /** @todo XOP: Just use new tables and decoders. */
4396 switch (bRm & 0x1f)
4397 {
4398 case 8: /* xop opcode map 8. */
4399 IEMOP_BITCH_ABOUT_STUB();
4400 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4401
4402 case 9: /* xop opcode map 9. */
4403 IEMOP_BITCH_ABOUT_STUB();
4404 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4405
4406 case 10: /* xop opcode map 10. */
4407 IEMOP_BITCH_ABOUT_STUB();
4408 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4409
4410 default:
4411 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
4412 return IEMOP_RAISE_INVALID_OPCODE();
4413 }
4414 }
4415 else
4416 Log(("XOP: Invalid prefix mix!\n"));
4417 }
4418 else
4419 Log(("XOP: XOP support disabled!\n"));
4420 return IEMOP_RAISE_INVALID_OPCODE();
4421}
4422
4423
4424/**
4425 * Common 'xchg reg,rAX' helper.
4426 */
4427FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
4428{
4429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4430
4431 iReg |= pVCpu->iem.s.uRexB;
4432 switch (pVCpu->iem.s.enmEffOpSize)
4433 {
4434 case IEMMODE_16BIT:
4435 IEM_MC_BEGIN(0, 2);
4436 IEM_MC_LOCAL(uint16_t, u16Tmp1);
4437 IEM_MC_LOCAL(uint16_t, u16Tmp2);
4438 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
4439 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
4440 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
4441 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
4442 IEM_MC_ADVANCE_RIP_AND_FINISH();
4443 IEM_MC_END();
4444 break;
4445
4446 case IEMMODE_32BIT:
4447 IEM_MC_BEGIN(0, 2);
4448 IEM_MC_LOCAL(uint32_t, u32Tmp1);
4449 IEM_MC_LOCAL(uint32_t, u32Tmp2);
4450 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
4451 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
4452 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
4453 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
4454 IEM_MC_ADVANCE_RIP_AND_FINISH();
4455 IEM_MC_END();
4456 break;
4457
4458 case IEMMODE_64BIT:
4459 IEM_MC_BEGIN(0, 2);
4460 IEM_MC_LOCAL(uint64_t, u64Tmp1);
4461 IEM_MC_LOCAL(uint64_t, u64Tmp2);
4462 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
4463 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
4464 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
4465 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
4466 IEM_MC_ADVANCE_RIP_AND_FINISH();
4467 IEM_MC_END();
4468 break;
4469
4470 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4471 }
4472}
4473
4474
4475/**
4476 * @opcode 0x90
4477 */
4478FNIEMOP_DEF(iemOp_nop)
4479{
4480 /* R8/R8D and RAX/EAX can be exchanged. */
4481 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4482 {
4483 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4484 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4485 }
4486
4487 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4488 {
4489 IEMOP_MNEMONIC(pause, "pause");
4490#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4491 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVmx)
4492 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmx_pause);
4493#endif
4494#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
4495 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSvm)
4496 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_svm_pause);
4497#endif
4498 }
4499 else
4500 IEMOP_MNEMONIC(nop, "nop");
4501 IEM_MC_BEGIN(0, 0);
4502 IEM_MC_ADVANCE_RIP_AND_FINISH();
4503 IEM_MC_END();
4504}
4505
4506
4507/**
4508 * @opcode 0x91
4509 */
4510FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
4511{
4512 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
4513 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
4514}
4515
4516
4517/**
4518 * @opcode 0x92
4519 */
4520FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
4521{
4522 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
4523 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
4524}
4525
4526
4527/**
4528 * @opcode 0x93
4529 */
4530FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
4531{
4532 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
4533 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
4534}
4535
4536
4537/**
4538 * @opcode 0x94
4539 */
4540FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4541{
4542 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4543 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4544}
4545
4546
4547/**
4548 * @opcode 0x95
4549 */
4550FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
4551{
4552 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
4553 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
4554}
4555
4556
4557/**
4558 * @opcode 0x96
4559 */
4560FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
4561{
4562 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
4563 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
4564}
4565
4566
4567/**
4568 * @opcode 0x97
4569 */
4570FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
4571{
4572 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
4573 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
4574}
4575
4576
4577/**
4578 * @opcode 0x98
4579 */
4580FNIEMOP_DEF(iemOp_cbw)
4581{
4582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4583 switch (pVCpu->iem.s.enmEffOpSize)
4584 {
4585 case IEMMODE_16BIT:
4586 IEMOP_MNEMONIC(cbw, "cbw");
4587 IEM_MC_BEGIN(0, 1);
4588 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
4589 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
4590 } IEM_MC_ELSE() {
4591 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
4592 } IEM_MC_ENDIF();
4593 IEM_MC_ADVANCE_RIP_AND_FINISH();
4594 IEM_MC_END();
4595 break;
4596
4597 case IEMMODE_32BIT:
4598 IEMOP_MNEMONIC(cwde, "cwde");
4599 IEM_MC_BEGIN(0, 1);
4600 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4601 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
4602 } IEM_MC_ELSE() {
4603 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
4604 } IEM_MC_ENDIF();
4605 IEM_MC_ADVANCE_RIP_AND_FINISH();
4606 IEM_MC_END();
4607 break;
4608
4609 case IEMMODE_64BIT:
4610 IEMOP_MNEMONIC(cdqe, "cdqe");
4611 IEM_MC_BEGIN(0, 1);
4612 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4613 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
4614 } IEM_MC_ELSE() {
4615 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
4616 } IEM_MC_ENDIF();
4617 IEM_MC_ADVANCE_RIP_AND_FINISH();
4618 IEM_MC_END();
4619 break;
4620
4621 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4622 }
4623}
4624
4625
4626/**
4627 * @opcode 0x99
4628 */
4629FNIEMOP_DEF(iemOp_cwd)
4630{
4631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4632 switch (pVCpu->iem.s.enmEffOpSize)
4633 {
4634 case IEMMODE_16BIT:
4635 IEMOP_MNEMONIC(cwd, "cwd");
4636 IEM_MC_BEGIN(0, 1);
4637 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4638 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
4639 } IEM_MC_ELSE() {
4640 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
4641 } IEM_MC_ENDIF();
4642 IEM_MC_ADVANCE_RIP_AND_FINISH();
4643 IEM_MC_END();
4644 break;
4645
4646 case IEMMODE_32BIT:
4647 IEMOP_MNEMONIC(cdq, "cdq");
4648 IEM_MC_BEGIN(0, 1);
4649 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4650 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
4651 } IEM_MC_ELSE() {
4652 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
4653 } IEM_MC_ENDIF();
4654 IEM_MC_ADVANCE_RIP_AND_FINISH();
4655 IEM_MC_END();
4656 break;
4657
4658 case IEMMODE_64BIT:
4659 IEMOP_MNEMONIC(cqo, "cqo");
4660 IEM_MC_BEGIN(0, 1);
4661 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
4662 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
4663 } IEM_MC_ELSE() {
4664 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
4665 } IEM_MC_ENDIF();
4666 IEM_MC_ADVANCE_RIP_AND_FINISH();
4667 IEM_MC_END();
4668 break;
4669
4670 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4671 }
4672}
4673
4674
4675/**
4676 * @opcode 0x9a
4677 */
4678FNIEMOP_DEF(iemOp_call_Ap)
4679{
4680 IEMOP_MNEMONIC(call_Ap, "call Ap");
4681 IEMOP_HLP_NO_64BIT();
4682
4683 /* Decode the far pointer address and pass it on to the far call C implementation. */
4684 uint32_t offSeg;
4685 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
4686 IEM_OPCODE_GET_NEXT_U32(&offSeg);
4687 else
4688 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
4689 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
4690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4691 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
4692}
4693
4694
/** Opcode 0x9b. (aka fwait)
 *
 * WAIT/FWAIT - checks for pending x87 exceptions (and device-not-available
 * conditions) and otherwise does nothing but advance RIP.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
4707
4708
4709/**
4710 * @opcode 0x9c
4711 */
4712FNIEMOP_DEF(iemOp_pushf_Fv)
4713{
4714 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
4715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4716 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4717 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
4718}
4719
4720
4721/**
4722 * @opcode 0x9d
4723 */
4724FNIEMOP_DEF(iemOp_popf_Fv)
4725{
4726 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
4727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4728 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4729 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
4730}
4731
4732
4733/**
4734 * @opcode 0x9e
4735 */
4736FNIEMOP_DEF(iemOp_sahf)
4737{
4738 IEMOP_MNEMONIC(sahf, "sahf");
4739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4740 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4741 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4742 return IEMOP_RAISE_INVALID_OPCODE();
4743 IEM_MC_BEGIN(0, 2);
4744 IEM_MC_LOCAL(uint32_t, u32Flags);
4745 IEM_MC_LOCAL(uint32_t, EFlags);
4746 IEM_MC_FETCH_EFLAGS(EFlags);
4747 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
4748 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
4749 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
4750 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
4751 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
4752 IEM_MC_COMMIT_EFLAGS(EFlags);
4753 IEM_MC_ADVANCE_RIP_AND_FINISH();
4754 IEM_MC_END();
4755}
4756
4757
4758/**
4759 * @opcode 0x9f
4760 */
4761FNIEMOP_DEF(iemOp_lahf)
4762{
4763 IEMOP_MNEMONIC(lahf, "lahf");
4764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4765 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4766 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4767 return IEMOP_RAISE_INVALID_OPCODE();
4768 IEM_MC_BEGIN(0, 1);
4769 IEM_MC_LOCAL(uint8_t, u8Flags);
4770 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
4771 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
4772 IEM_MC_ADVANCE_RIP_AND_FINISH();
4773 IEM_MC_END();
4774}
4775
4776
4777/**
4778 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
4779 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
4780 * prefixes. Will return on failures.
4781 * @param a_GCPtrMemOff The variable to store the offset in.
4782 */
4783#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
4784 do \
4785 { \
4786 switch (pVCpu->iem.s.enmEffAddrMode) \
4787 { \
4788 case IEMMODE_16BIT: \
4789 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
4790 break; \
4791 case IEMMODE_32BIT: \
4792 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
4793 break; \
4794 case IEMMODE_64BIT: \
4795 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
4796 break; \
4797 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4798 } \
4799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4800 } while (0)
4801
4802/**
4803 * @opcode 0xa0
4804 */
4805FNIEMOP_DEF(iemOp_mov_AL_Ob)
4806{
4807 /*
4808 * Get the offset and fend off lock prefixes.
4809 */
4810 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
4811 RTGCPTR GCPtrMemOff;
4812 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4813
4814 /*
4815 * Fetch AL.
4816 */
4817 IEM_MC_BEGIN(0,1);
4818 IEM_MC_LOCAL(uint8_t, u8Tmp);
4819 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4820 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
4821 IEM_MC_ADVANCE_RIP_AND_FINISH();
4822 IEM_MC_END();
4823}
4824
4825
4826/**
4827 * @opcode 0xa1
4828 */
4829FNIEMOP_DEF(iemOp_mov_rAX_Ov)
4830{
4831 /*
4832 * Get the offset and fend off lock prefixes.
4833 */
4834 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
4835 RTGCPTR GCPtrMemOff;
4836 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4837
4838 /*
4839 * Fetch rAX.
4840 */
4841 switch (pVCpu->iem.s.enmEffOpSize)
4842 {
4843 case IEMMODE_16BIT:
4844 IEM_MC_BEGIN(0,1);
4845 IEM_MC_LOCAL(uint16_t, u16Tmp);
4846 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4847 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
4848 IEM_MC_ADVANCE_RIP_AND_FINISH();
4849 IEM_MC_END();
4850 break;
4851
4852 case IEMMODE_32BIT:
4853 IEM_MC_BEGIN(0,1);
4854 IEM_MC_LOCAL(uint32_t, u32Tmp);
4855 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4856 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
4857 IEM_MC_ADVANCE_RIP_AND_FINISH();
4858 IEM_MC_END();
4859 break;
4860
4861 case IEMMODE_64BIT:
4862 IEM_MC_BEGIN(0,1);
4863 IEM_MC_LOCAL(uint64_t, u64Tmp);
4864 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4865 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
4866 IEM_MC_ADVANCE_RIP_AND_FINISH();
4867 IEM_MC_END();
4868 break;
4869
4870 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4871 }
4872}
4873
4874
4875/**
4876 * @opcode 0xa2
4877 */
4878FNIEMOP_DEF(iemOp_mov_Ob_AL)
4879{
4880 /*
4881 * Get the offset and fend off lock prefixes.
4882 */
4883 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
4884 RTGCPTR GCPtrMemOff;
4885 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4886
4887 /*
4888 * Store AL.
4889 */
4890 IEM_MC_BEGIN(0,1);
4891 IEM_MC_LOCAL(uint8_t, u8Tmp);
4892 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4893 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4894 IEM_MC_ADVANCE_RIP_AND_FINISH();
4895 IEM_MC_END();
4896}
4897
4898
4899/**
4900 * @opcode 0xa3
4901 */
4902FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4903{
4904 /*
4905 * Get the offset and fend off lock prefixes.
4906 */
4907 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
4908 RTGCPTR GCPtrMemOff;
4909 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4910
4911 /*
4912 * Store rAX.
4913 */
4914 switch (pVCpu->iem.s.enmEffOpSize)
4915 {
4916 case IEMMODE_16BIT:
4917 IEM_MC_BEGIN(0,1);
4918 IEM_MC_LOCAL(uint16_t, u16Tmp);
4919 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4920 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4921 IEM_MC_ADVANCE_RIP_AND_FINISH();
4922 IEM_MC_END();
4923 break;
4924
4925 case IEMMODE_32BIT:
4926 IEM_MC_BEGIN(0,1);
4927 IEM_MC_LOCAL(uint32_t, u32Tmp);
4928 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4929 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4930 IEM_MC_ADVANCE_RIP_AND_FINISH();
4931 IEM_MC_END();
4932 break;
4933
4934 case IEMMODE_64BIT:
4935 IEM_MC_BEGIN(0,1);
4936 IEM_MC_LOCAL(uint64_t, u64Tmp);
4937 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4938 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4939 IEM_MC_ADVANCE_RIP_AND_FINISH();
4940 IEM_MC_END();
4941 break;
4942
4943 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4944 }
4945}
4946
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one MOVS step for the given value width and address width: reads
 * ValBits from [seg:rSI], writes them to ES:[rDI], then advances (or, when
 * EFLAGS.DF is set, retreats) rSI and rDI by ValBits/8 at AddrBits width. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END()
4965
4966/**
4967 * @opcode 0xa4
4968 */
4969FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
4970{
4971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4972
4973 /*
4974 * Use the C implementation if a repeat prefix is encountered.
4975 */
4976 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4977 {
4978 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
4979 switch (pVCpu->iem.s.enmEffAddrMode)
4980 {
4981 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
4982 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
4983 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
4984 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4985 }
4986 }
4987 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
4988
4989 /*
4990 * Sharing case implementation with movs[wdq] below.
4991 */
4992 switch (pVCpu->iem.s.enmEffAddrMode)
4993 {
4994 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
4995 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
4996 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
4997 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4998 }
4999}
5000
5001
5002/**
5003 * @opcode 0xa5
5004 */
5005FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
5006{
5007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5008
5009 /*
5010 * Use the C implementation if a repeat prefix is encountered.
5011 */
5012 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5013 {
5014 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
5015 switch (pVCpu->iem.s.enmEffOpSize)
5016 {
5017 case IEMMODE_16BIT:
5018 switch (pVCpu->iem.s.enmEffAddrMode)
5019 {
5020 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
5021 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
5022 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
5023 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5024 }
5025 break;
5026 case IEMMODE_32BIT:
5027 switch (pVCpu->iem.s.enmEffAddrMode)
5028 {
5029 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
5030 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
5031 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
5032 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5033 }
5034 case IEMMODE_64BIT:
5035 switch (pVCpu->iem.s.enmEffAddrMode)
5036 {
5037 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
5038 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
5039 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
5040 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5041 }
5042 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5043 }
5044 }
5045 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
5046
5047 /*
5048 * Annoying double switch here.
5049 * Using ugly macro for implementing the cases, sharing it with movsb.
5050 */
5051 switch (pVCpu->iem.s.enmEffOpSize)
5052 {
5053 case IEMMODE_16BIT:
5054 switch (pVCpu->iem.s.enmEffAddrMode)
5055 {
5056 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
5057 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
5058 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
5059 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5060 }
5061 break;
5062
5063 case IEMMODE_32BIT:
5064 switch (pVCpu->iem.s.enmEffAddrMode)
5065 {
5066 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
5067 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
5068 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
5069 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5070 }
5071 break;
5072
5073 case IEMMODE_64BIT:
5074 switch (pVCpu->iem.s.enmEffAddrMode)
5075 {
5076 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5077 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
5078 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
5079 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5080 }
5081 break;
5082 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5083 }
5084}
5085
5086#undef IEM_MOVS_CASE
5087
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one CMPS step for the given value width and address width: reads
 * ValBits from [seg:rSI] and from ES:[rDI], compares them via the cmp
 * assembly helper (updating EFLAGS only), then advances (or, when EFLAGS.DF
 * is set, retreats) rSI and rDI by ValBits/8 at AddrBits width. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END()
5114
5115/**
5116 * @opcode 0xa6
5117 */
5118FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
5119{
5120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5121
5122 /*
5123 * Use the C implementation if a repeat prefix is encountered.
5124 */
5125 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5126 {
5127 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
5128 switch (pVCpu->iem.s.enmEffAddrMode)
5129 {
5130 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
5131 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
5132 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
5133 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5134 }
5135 }
5136 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5137 {
5138 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
5139 switch (pVCpu->iem.s.enmEffAddrMode)
5140 {
5141 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
5142 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
5143 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
5144 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5145 }
5146 }
5147 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
5148
5149 /*
5150 * Sharing case implementation with cmps[wdq] below.
5151 */
5152 switch (pVCpu->iem.s.enmEffAddrMode)
5153 {
5154 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
5155 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
5156 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
5157 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5158 }
5159}
5160
5161
5162/**
5163 * @opcode 0xa7
5164 */
5165FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
5166{
5167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5168
5169 /*
5170 * Use the C implementation if a repeat prefix is encountered.
5171 */
5172 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5173 {
5174 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
5175 switch (pVCpu->iem.s.enmEffOpSize)
5176 {
5177 case IEMMODE_16BIT:
5178 switch (pVCpu->iem.s.enmEffAddrMode)
5179 {
5180 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
5181 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
5182 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
5183 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5184 }
5185 break;
5186 case IEMMODE_32BIT:
5187 switch (pVCpu->iem.s.enmEffAddrMode)
5188 {
5189 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
5190 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
5191 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
5192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5193 }
5194 case IEMMODE_64BIT:
5195 switch (pVCpu->iem.s.enmEffAddrMode)
5196 {
5197 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
5198 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5199 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5200 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5201 }
5202 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5203 }
5204 }
5205
5206 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5207 {
5208 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
5209 switch (pVCpu->iem.s.enmEffOpSize)
5210 {
5211 case IEMMODE_16BIT:
5212 switch (pVCpu->iem.s.enmEffAddrMode)
5213 {
5214 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
5215 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
5216 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
5217 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5218 }
5219 break;
5220 case IEMMODE_32BIT:
5221 switch (pVCpu->iem.s.enmEffAddrMode)
5222 {
5223 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
5224 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
5225 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
5226 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5227 }
5228 case IEMMODE_64BIT:
5229 switch (pVCpu->iem.s.enmEffAddrMode)
5230 {
5231 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
5232 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5233 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5234 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5235 }
5236 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5237 }
5238 }
5239
5240 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
5241
5242 /*
5243 * Annoying double switch here.
5244 * Using ugly macro for implementing the cases, sharing it with cmpsb.
5245 */
5246 switch (pVCpu->iem.s.enmEffOpSize)
5247 {
5248 case IEMMODE_16BIT:
5249 switch (pVCpu->iem.s.enmEffAddrMode)
5250 {
5251 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
5252 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
5253 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
5254 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5255 }
5256 break;
5257
5258 case IEMMODE_32BIT:
5259 switch (pVCpu->iem.s.enmEffAddrMode)
5260 {
5261 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
5262 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
5263 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
5264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5265 }
5266 break;
5267
5268 case IEMMODE_64BIT:
5269 switch (pVCpu->iem.s.enmEffAddrMode)
5270 {
5271 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5272 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
5273 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
5274 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5275 }
5276 break;
5277 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5278 }
5279}
5280
5281#undef IEM_CMPS_CASE
5282
5283/**
5284 * @opcode 0xa8
5285 */
5286FNIEMOP_DEF(iemOp_test_AL_Ib)
5287{
5288 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
5289 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5290 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
5291}
5292
5293
5294/**
5295 * @opcode 0xa9
5296 */
5297FNIEMOP_DEF(iemOp_test_eAX_Iz)
5298{
5299 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
5300 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5301 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
5302}
5303
5304
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the microcode for one STOS case: store the low ValBits of rAX to
 * ES:[rDI], then step rDI by ValBits/8 - down when EFLAGS.DF is set, up
 * otherwise.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64);
 *                      rDI is zero-extended to 64 bits from this width.
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
5320
5321/**
5322 * @opcode 0xaa
5323 */
5324FNIEMOP_DEF(iemOp_stosb_Yb_AL)
5325{
5326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5327
5328 /*
5329 * Use the C implementation if a repeat prefix is encountered.
5330 */
5331 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5332 {
5333 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
5334 switch (pVCpu->iem.s.enmEffAddrMode)
5335 {
5336 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
5337 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
5338 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
5339 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5340 }
5341 }
5342 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
5343
5344 /*
5345 * Sharing case implementation with stos[wdq] below.
5346 */
5347 switch (pVCpu->iem.s.enmEffAddrMode)
5348 {
5349 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
5350 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
5351 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
5352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5353 }
5354}
5355
5356
5357/**
5358 * @opcode 0xab
5359 */
5360FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
5361{
5362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5363
5364 /*
5365 * Use the C implementation if a repeat prefix is encountered.
5366 */
5367 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5368 {
5369 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
5370 switch (pVCpu->iem.s.enmEffOpSize)
5371 {
5372 case IEMMODE_16BIT:
5373 switch (pVCpu->iem.s.enmEffAddrMode)
5374 {
5375 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
5376 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
5377 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
5378 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5379 }
5380 break;
5381 case IEMMODE_32BIT:
5382 switch (pVCpu->iem.s.enmEffAddrMode)
5383 {
5384 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
5385 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
5386 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
5387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5388 }
5389 case IEMMODE_64BIT:
5390 switch (pVCpu->iem.s.enmEffAddrMode)
5391 {
5392 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
5393 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
5394 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
5395 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5396 }
5397 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5398 }
5399 }
5400 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
5401
5402 /*
5403 * Annoying double switch here.
5404 * Using ugly macro for implementing the cases, sharing it with stosb.
5405 */
5406 switch (pVCpu->iem.s.enmEffOpSize)
5407 {
5408 case IEMMODE_16BIT:
5409 switch (pVCpu->iem.s.enmEffAddrMode)
5410 {
5411 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
5412 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
5413 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
5414 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5415 }
5416 break;
5417
5418 case IEMMODE_32BIT:
5419 switch (pVCpu->iem.s.enmEffAddrMode)
5420 {
5421 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
5422 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
5423 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
5424 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5425 }
5426 break;
5427
5428 case IEMMODE_64BIT:
5429 switch (pVCpu->iem.s.enmEffAddrMode)
5430 {
5431 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5432 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
5433 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
5434 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5435 }
5436 break;
5437 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5438 }
5439}
5440
5441#undef IEM_STOS_CASE
5442
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the microcode for one LODS case: load ValBits from iEffSeg:[rSI]
 * into the low ValBits of rAX, then step rSI by ValBits/8 - down when
 * EFLAGS.DF is set, up otherwise.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64);
 *                      rSI is zero-extended to 64 bits from this width.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
5458
5459/**
5460 * @opcode 0xac
5461 */
5462FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
5463{
5464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5465
5466 /*
5467 * Use the C implementation if a repeat prefix is encountered.
5468 */
5469 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5470 {
5471 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
5472 switch (pVCpu->iem.s.enmEffAddrMode)
5473 {
5474 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
5475 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
5476 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
5477 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5478 }
5479 }
5480 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
5481
5482 /*
5483 * Sharing case implementation with stos[wdq] below.
5484 */
5485 switch (pVCpu->iem.s.enmEffAddrMode)
5486 {
5487 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
5488 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
5489 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
5490 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5491 }
5492}
5493
5494
5495/**
5496 * @opcode 0xad
5497 */
5498FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
5499{
5500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5501
5502 /*
5503 * Use the C implementation if a repeat prefix is encountered.
5504 */
5505 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5506 {
5507 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
5508 switch (pVCpu->iem.s.enmEffOpSize)
5509 {
5510 case IEMMODE_16BIT:
5511 switch (pVCpu->iem.s.enmEffAddrMode)
5512 {
5513 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
5514 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
5515 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
5516 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5517 }
5518 break;
5519 case IEMMODE_32BIT:
5520 switch (pVCpu->iem.s.enmEffAddrMode)
5521 {
5522 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
5523 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
5524 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
5525 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5526 }
5527 case IEMMODE_64BIT:
5528 switch (pVCpu->iem.s.enmEffAddrMode)
5529 {
5530 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
5531 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
5532 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
5533 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5534 }
5535 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5536 }
5537 }
5538 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
5539
5540 /*
5541 * Annoying double switch here.
5542 * Using ugly macro for implementing the cases, sharing it with lodsb.
5543 */
5544 switch (pVCpu->iem.s.enmEffOpSize)
5545 {
5546 case IEMMODE_16BIT:
5547 switch (pVCpu->iem.s.enmEffAddrMode)
5548 {
5549 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
5550 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
5551 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
5552 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5553 }
5554 break;
5555
5556 case IEMMODE_32BIT:
5557 switch (pVCpu->iem.s.enmEffAddrMode)
5558 {
5559 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
5560 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
5561 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
5562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5563 }
5564 break;
5565
5566 case IEMMODE_64BIT:
5567 switch (pVCpu->iem.s.enmEffAddrMode)
5568 {
5569 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5570 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
5571 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
5572 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5573 }
5574 break;
5575 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5576 }
5577}
5578
5579#undef IEM_LODS_CASE
5580
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the microcode for one SCAS case: compare the low ValBits of rAX
 * against ValBits loaded from ES:[rDI] (via iemAImpl_cmp_uNN, which updates
 * EFLAGS), then step rDI by ValBits/8 - down when EFLAGS.DF is set, up
 * otherwise.  rAX itself is not modified by the compare.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64);
 *                      rDI is zero-extended to 64 bits from this width.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
5602
5603/**
5604 * @opcode 0xae
5605 */
5606FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5607{
5608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5609
5610 /*
5611 * Use the C implementation if a repeat prefix is encountered.
5612 */
5613 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5614 {
5615 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5616 switch (pVCpu->iem.s.enmEffAddrMode)
5617 {
5618 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5619 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5620 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5621 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5622 }
5623 }
5624 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5625 {
5626 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5627 switch (pVCpu->iem.s.enmEffAddrMode)
5628 {
5629 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5630 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5631 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5632 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5633 }
5634 }
5635 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5636
5637 /*
5638 * Sharing case implementation with stos[wdq] below.
5639 */
5640 switch (pVCpu->iem.s.enmEffAddrMode)
5641 {
5642 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5643 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5644 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5645 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5646 }
5647}
5648
5649
5650/**
5651 * @opcode 0xaf
5652 */
5653FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5654{
5655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5656
5657 /*
5658 * Use the C implementation if a repeat prefix is encountered.
5659 */
5660 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5661 {
5662 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5663 switch (pVCpu->iem.s.enmEffOpSize)
5664 {
5665 case IEMMODE_16BIT:
5666 switch (pVCpu->iem.s.enmEffAddrMode)
5667 {
5668 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5669 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5670 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5671 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5672 }
5673 break;
5674 case IEMMODE_32BIT:
5675 switch (pVCpu->iem.s.enmEffAddrMode)
5676 {
5677 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5678 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5679 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5680 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5681 }
5682 case IEMMODE_64BIT:
5683 switch (pVCpu->iem.s.enmEffAddrMode)
5684 {
5685 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
5686 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5687 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5688 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5689 }
5690 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5691 }
5692 }
5693 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5694 {
5695 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5696 switch (pVCpu->iem.s.enmEffOpSize)
5697 {
5698 case IEMMODE_16BIT:
5699 switch (pVCpu->iem.s.enmEffAddrMode)
5700 {
5701 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5702 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5703 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5704 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5705 }
5706 break;
5707 case IEMMODE_32BIT:
5708 switch (pVCpu->iem.s.enmEffAddrMode)
5709 {
5710 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5711 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5712 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5713 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5714 }
5715 case IEMMODE_64BIT:
5716 switch (pVCpu->iem.s.enmEffAddrMode)
5717 {
5718 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5719 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5720 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5721 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5722 }
5723 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5724 }
5725 }
5726 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5727
5728 /*
5729 * Annoying double switch here.
5730 * Using ugly macro for implementing the cases, sharing it with scasb.
5731 */
5732 switch (pVCpu->iem.s.enmEffOpSize)
5733 {
5734 case IEMMODE_16BIT:
5735 switch (pVCpu->iem.s.enmEffAddrMode)
5736 {
5737 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5738 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5739 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5740 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5741 }
5742 break;
5743
5744 case IEMMODE_32BIT:
5745 switch (pVCpu->iem.s.enmEffAddrMode)
5746 {
5747 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5748 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5749 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5750 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5751 }
5752 break;
5753
5754 case IEMMODE_64BIT:
5755 switch (pVCpu->iem.s.enmEffAddrMode)
5756 {
5757 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5758 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5759 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5761 }
5762 break;
5763 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5764 }
5765}
5766
5767#undef IEM_SCAS_CASE
5768
5769/**
5770 * Common 'mov r8, imm8' helper.
5771 */
5772FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
5773{
5774 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
5775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5776
5777 IEM_MC_BEGIN(0, 1);
5778 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
5779 IEM_MC_STORE_GREG_U8(iReg, u8Value);
5780 IEM_MC_ADVANCE_RIP_AND_FINISH();
5781 IEM_MC_END();
5782}
5783
5784
5785/**
5786 * @opcode 0xb0
5787 */
5788FNIEMOP_DEF(iemOp_mov_AL_Ib)
5789{
5790 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
5791 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5792}
5793
5794
5795/**
5796 * @opcode 0xb1
5797 */
5798FNIEMOP_DEF(iemOp_CL_Ib)
5799{
5800 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
5801 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5802}
5803
5804
5805/**
5806 * @opcode 0xb2
5807 */
5808FNIEMOP_DEF(iemOp_DL_Ib)
5809{
5810 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
5811 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5812}
5813
5814
5815/**
5816 * @opcode 0xb3
5817 */
5818FNIEMOP_DEF(iemOp_BL_Ib)
5819{
5820 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
5821 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5822}
5823
5824
5825/**
5826 * @opcode 0xb4
5827 */
5828FNIEMOP_DEF(iemOp_mov_AH_Ib)
5829{
5830 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
5831 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5832}
5833
5834
5835/**
5836 * @opcode 0xb5
5837 */
5838FNIEMOP_DEF(iemOp_CH_Ib)
5839{
5840 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
5841 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5842}
5843
5844
5845/**
5846 * @opcode 0xb6
5847 */
5848FNIEMOP_DEF(iemOp_DH_Ib)
5849{
5850 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
5851 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5852}
5853
5854
5855/**
5856 * @opcode 0xb7
5857 */
5858FNIEMOP_DEF(iemOp_BH_Ib)
5859{
5860 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
5861 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5862}
5863
5864
5865/**
5866 * Common 'mov regX,immX' helper.
5867 */
5868FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5869{
5870 switch (pVCpu->iem.s.enmEffOpSize)
5871 {
5872 case IEMMODE_16BIT:
5873 {
5874 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5876
5877 IEM_MC_BEGIN(0, 1);
5878 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5879 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5880 IEM_MC_ADVANCE_RIP_AND_FINISH();
5881 IEM_MC_END();
5882 break;
5883 }
5884
5885 case IEMMODE_32BIT:
5886 {
5887 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5889
5890 IEM_MC_BEGIN(0, 1);
5891 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5892 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5893 IEM_MC_ADVANCE_RIP_AND_FINISH();
5894 IEM_MC_END();
5895 break;
5896 }
5897 case IEMMODE_64BIT:
5898 {
5899 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5901
5902 IEM_MC_BEGIN(0, 1);
5903 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5904 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5905 IEM_MC_ADVANCE_RIP_AND_FINISH();
5906 IEM_MC_END();
5907 break;
5908 }
5909 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5910 }
5911}
5912
5913
5914/**
5915 * @opcode 0xb8
5916 */
5917FNIEMOP_DEF(iemOp_eAX_Iv)
5918{
5919 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
5920 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5921}
5922
5923
5924/**
5925 * @opcode 0xb9
5926 */
5927FNIEMOP_DEF(iemOp_eCX_Iv)
5928{
5929 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
5930 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5931}
5932
5933
5934/**
5935 * @opcode 0xba
5936 */
5937FNIEMOP_DEF(iemOp_eDX_Iv)
5938{
5939 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
5940 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5941}
5942
5943
5944/**
5945 * @opcode 0xbb
5946 */
5947FNIEMOP_DEF(iemOp_eBX_Iv)
5948{
5949 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
5950 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5951}
5952
5953
5954/**
5955 * @opcode 0xbc
5956 */
5957FNIEMOP_DEF(iemOp_eSP_Iv)
5958{
5959 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
5960 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5961}
5962
5963
5964/**
5965 * @opcode 0xbd
5966 */
5967FNIEMOP_DEF(iemOp_eBP_Iv)
5968{
5969 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5970 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5971}
5972
5973
5974/**
5975 * @opcode 0xbe
5976 */
5977FNIEMOP_DEF(iemOp_eSI_Iv)
5978{
5979 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
5980 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5981}
5982
5983
5984/**
5985 * @opcode 0xbf
5986 */
5987FNIEMOP_DEF(iemOp_eDI_Iv)
5988{
5989 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
5990 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5991}
5992
5993
5994/**
5995 * @opcode 0xc0
5996 */
5997FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
5998{
5999 IEMOP_HLP_MIN_186();
6000 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6001 PCIEMOPSHIFTSIZES pImpl;
6002 switch (IEM_GET_MODRM_REG_8(bRm))
6003 {
6004 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
6005 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
6006 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
6007 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
6008 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
6009 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
6010 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
6011 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6012 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6013 }
6014 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6015
6016 if (IEM_IS_MODRM_REG_MODE(bRm))
6017 {
6018 /* register */
6019 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6021 IEM_MC_BEGIN(3, 0);
6022 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6023 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6024 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6025 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6026 IEM_MC_REF_EFLAGS(pEFlags);
6027 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6028 IEM_MC_ADVANCE_RIP_AND_FINISH();
6029 IEM_MC_END();
6030 }
6031 else
6032 {
6033 /* memory */
6034 IEM_MC_BEGIN(3, 2);
6035 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6036 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6037 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6039
6040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6041 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6042 IEM_MC_ASSIGN(cShiftArg, cShift);
6043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6044 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6045 IEM_MC_FETCH_EFLAGS(EFlags);
6046 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6047
6048 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6049 IEM_MC_COMMIT_EFLAGS(EFlags);
6050 IEM_MC_ADVANCE_RIP_AND_FINISH();
6051 IEM_MC_END();
6052 }
6053}
6054
6055
6056/**
6057 * @opcode 0xc1
6058 */
6059FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
6060{
6061 IEMOP_HLP_MIN_186();
6062 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6063 PCIEMOPSHIFTSIZES pImpl;
6064 switch (IEM_GET_MODRM_REG_8(bRm))
6065 {
6066 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
6067 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
6068 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
6069 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
6070 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
6071 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
6072 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
6073 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6074 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6075 }
6076 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6077
6078 if (IEM_IS_MODRM_REG_MODE(bRm))
6079 {
6080 /* register */
6081 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6083 switch (pVCpu->iem.s.enmEffOpSize)
6084 {
6085 case IEMMODE_16BIT:
6086 IEM_MC_BEGIN(3, 0);
6087 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6088 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6089 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6090 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6091 IEM_MC_REF_EFLAGS(pEFlags);
6092 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6093 IEM_MC_ADVANCE_RIP_AND_FINISH();
6094 IEM_MC_END();
6095 break;
6096
6097 case IEMMODE_32BIT:
6098 IEM_MC_BEGIN(3, 0);
6099 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6100 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6101 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6102 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6103 IEM_MC_REF_EFLAGS(pEFlags);
6104 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6105 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6106 IEM_MC_ADVANCE_RIP_AND_FINISH();
6107 IEM_MC_END();
6108 break;
6109
6110 case IEMMODE_64BIT:
6111 IEM_MC_BEGIN(3, 0);
6112 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6113 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6114 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6115 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6116 IEM_MC_REF_EFLAGS(pEFlags);
6117 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6118 IEM_MC_ADVANCE_RIP_AND_FINISH();
6119 IEM_MC_END();
6120 break;
6121
6122 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6123 }
6124 }
6125 else
6126 {
6127 /* memory */
6128 switch (pVCpu->iem.s.enmEffOpSize)
6129 {
6130 case IEMMODE_16BIT:
6131 IEM_MC_BEGIN(3, 2);
6132 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6133 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6134 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6135 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6136
6137 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6138 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6139 IEM_MC_ASSIGN(cShiftArg, cShift);
6140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6141 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6142 IEM_MC_FETCH_EFLAGS(EFlags);
6143 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6144
6145 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6146 IEM_MC_COMMIT_EFLAGS(EFlags);
6147 IEM_MC_ADVANCE_RIP_AND_FINISH();
6148 IEM_MC_END();
6149 break;
6150
6151 case IEMMODE_32BIT:
6152 IEM_MC_BEGIN(3, 2);
6153 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6154 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6155 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6157
6158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6159 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6160 IEM_MC_ASSIGN(cShiftArg, cShift);
6161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6162 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6163 IEM_MC_FETCH_EFLAGS(EFlags);
6164 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6165
6166 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6167 IEM_MC_COMMIT_EFLAGS(EFlags);
6168 IEM_MC_ADVANCE_RIP_AND_FINISH();
6169 IEM_MC_END();
6170 break;
6171
6172 case IEMMODE_64BIT:
6173 IEM_MC_BEGIN(3, 2);
6174 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6175 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6176 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6177 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6178
6179 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6180 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6181 IEM_MC_ASSIGN(cShiftArg, cShift);
6182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6183 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6184 IEM_MC_FETCH_EFLAGS(EFlags);
6185 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6186
6187 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6188 IEM_MC_COMMIT_EFLAGS(EFlags);
6189 IEM_MC_ADVANCE_RIP_AND_FINISH();
6190 IEM_MC_END();
6191 break;
6192
6193 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6194 }
6195 }
6196}
6197
6198
/**
 * @opcode 0xc2
 *
 * Near return, releasing Iw extra bytes of stack after popping the return
 * address.  Deferred to the C implementation (iemCImpl_retn).
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* stack operation: defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6210
6211
/**
 * @opcode 0xc3
 *
 * Plain near return; same C implementation as 0xc2 with zero extra bytes to
 * release from the stack.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* stack operation: defaults to 64-bit operand size in long mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
6222
6223
/**
 * @opcode 0xc4
 *
 * Either LES (legacy/compat mode with a memory operand) or the three byte
 * VEX prefix form.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode.  VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            if ((bVex2 & 0x80 /* VEX.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* VEX stores R, X, B and vvvv inverted; un-invert them here. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* The low five bits of the first payload byte select the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        Log(("VEX3: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Legacy LES decoding (memory operand, not 64-bit mode). */
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
6293
6294
/**
 * @opcode 0xc5
 *
 * Either LDS (legacy/compat mode with a memory operand) or the two byte
 * VEX prefix form.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* The single payload byte carries inverted R and vvvv, plus L and pp. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

            /* The two byte VEX form always implies the 0x0f opcode map. */
#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Legacy LDS decoding (memory operand, not 64-bit mode). */
    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
6338
6339
/**
 * @opcode 0xc6
 *
 * Group 11: mov Eb,Ib is the only assigned encoding (/0); all other /reg
 * values raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* Note! The effective address is calculated before the immediate is
                 fetched since the immediate byte follows any displacement. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6373
6374
/**
 * @opcode 0xc7
 *
 * Group 11: mov Ev,Iz is the only assigned encoding (/0); all other /reg
 * values raise \#UD.  The 64-bit form takes a sign-extended 32-bit immediate.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* Iz is 32 bits even with REX.W; sign-extend it to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* The last argument is the immediate size (cbImm) trailing the
                   displacement; needed for RIP-relative addressing. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* cbImm is still 4: Iz is 32-bit. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6462
6463
6464
6465
/**
 * @opcode 0xc8
 *
 * ENTER Iw,Ib - create a stack frame of Iw bytes with nesting level Ib.
 * Introduced with the 80186; deferred to iemCImpl_enter.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* stack operation: defaults to 64-bit operand size in long mode */
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t u8NestingLevel;  IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
6479
6480
/**
 * @opcode 0xc9
 *
 * LEAVE - tear down the current stack frame (counterpart of ENTER).
 * Introduced with the 80186; deferred to iemCImpl_leave.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* stack operation: defaults to 64-bit operand size in long mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
6492
6493
/**
 * @opcode 0xca
 *
 * Far return, releasing Iw extra bytes of stack after popping the return
 * CS:IP/EIP/RIP.  Deferred to iemCImpl_retf.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* stack operation: defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6505
6506
/**
 * @opcode 0xcb
 *
 * Plain far return; same C implementation as 0xca with zero extra bytes to
 * release from the stack.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* stack operation: defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
6517
6518
/**
 * @opcode 0xcc
 *
 * INT3 - breakpoint; raises \#BP via the common software-interrupt
 * implementation (iemCImpl_int with IEMINT_INT3 semantics).
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
6528
6529
/**
 * @opcode 0xcd
 *
 * INT Ib - software interrupt with the vector given by the immediate byte.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, IEMINT_INTN);
}
6540
6541
/**
 * @opcode 0xce
 *
 * INTO - raise \#OF if the overflow flag is set.  Invalid in 64-bit mode
 * (IEMOP_HLP_NO_64BIT).  The conditional-on-OF part is handled inside
 * iemCImpl_int via the IEMINT_INTO flavor.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,  /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(IEMINT,    enmInt, /*=*/ IEMINT_INTO, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, enmInt);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6557
6558
/**
 * @opcode 0xcf
 *
 * IRET/IRETD/IRETQ - interrupt return; deferred to iemCImpl_iret with the
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
6568
6569
/**
 * @opcode 0xd0
 *
 * Group 2 shift/rotate of a byte operand by an implicit count of 1:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,1.  The /6 encoding is unassigned.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Read-modify-write: map the byte, shift in place, then commit. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6626
6627
6628
/**
 * @opcode 0xd1
 *
 * Group 2 shift/rotate of a word/dword/qword operand by an implicit count
 * of 1: rol/ror/rcl/rcr/shl/shr/sar Ev,1.  The /6 encoding is unassigned.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: read-modify-write mapping of the destination operand. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6762
6763
/**
 * @opcode 0xd2
 *
 * Group 2 shift/rotate of a byte operand by the count in CL:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,CL.  The /6 encoding is unassigned.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: read-modify-write mapping of the destination operand. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6822
6823
/**
 * @opcode 0xd3
 *
 * Group 2 shift/rotate of a word/dword/qword operand by the count in CL:
 * rol/ror/rcl/rcr/shl/shr/sar Ev,CL.  The /6 encoding is unassigned.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: read-modify-write mapping of the destination operand. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6963
/**
 * @opcode 0xd4
 *
 * AAM Ib - ASCII adjust AX after multiply.  Invalid in 64-bit mode.  A zero
 * immediate (the divisor) raises \#DE before anything else happens.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
6977
6978
/**
 * @opcode 0xd5
 *
 * AAD Ib - ASCII adjust AX before division.  Invalid in 64-bit mode.
 * Unlike AAM, a zero immediate does not fault here (it is a multiplier).
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
6990
6991
/**
 * @opcode 0xd6
 *
 * SALC - set AL to 0xff if CF is set, else to 0x00.  Undocumented opcode;
 * invalid in 64-bit mode (IEMOP_HLP_NO_64BIT).
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7010
7011
/**
 * @opcode 0xd7
 *
 * XLAT - table lookup: AL = [seg:xBX + zero-extended AL], with the address
 * width selected by the effective address mode.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);     /* index = zero-extended AL */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);    /* base  = BX */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);     /* index = zero-extended AL */
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);    /* base  = EBX */
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);     /* index = zero-extended AL */
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);    /* base  = RBX */
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7060
7061
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM/\#MF style FPU exceptions as appropriate before use; if either
 * register is empty the stack-underflow path is taken instead of calling the
 * assembly worker.
 *
 * @param   bRm         Mod R/M byte (the R/M field selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,             2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result goes to ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7092
7093
/**
 * Common worker for FPU instructions working on ST0 and STn, and only
 * affecting flags (FSW); no value is stored back.
 *
 * @param   bRm         Mod R/M byte (the R/M field selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw); /* only the status word is updated */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7124
7125
7126/**
7127 * Common worker for FPU instructions working on ST0 and STn, only affecting
7128 * flags, and popping when done.
7129 *
7130 * @param bRm Mod R/M byte.
7131 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7132 */
7133FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
7134{
7135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7136
7137 IEM_MC_BEGIN(3, 1);
7138 IEM_MC_LOCAL(uint16_t, u16Fsw);
7139 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7140 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7141 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7142
7143 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7144 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7145 IEM_MC_PREPARE_FPU_USAGE();
7146 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
7147 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
7148 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
7149 IEM_MC_ELSE()
7150 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
7151 IEM_MC_ENDIF();
7152 IEM_MC_ADVANCE_RIP_AND_FINISH();
7153
7154 IEM_MC_END();
7155}
7156
7157
/** Opcode 0xd8 11/0 - fadd st0,stN: ST0 := ST0 + ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1 - fmul st0,stN: ST0 := ST0 * ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2 - fcom st0,stN: compare ST0 with ST(i), flags only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3 - fcomp st0,stN: like fcom but pops afterwards (same worker). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4 - fsub st0,stN: ST0 := ST0 - ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5 - fsubr st0,stN: ST0 := ST(i) - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6 - fdiv st0,stN: ST0 := ST0 / ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7 - fdivr st0,stN: ST0 := ST(i) / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
7220
7221
7222/**
7223 * Common worker for FPU instructions working on ST0 and an m32r, and storing
7224 * the result in ST0.
7225 *
7226 * @param bRm Mod R/M byte.
7227 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7228 */
7229FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
7230{
7231 IEM_MC_BEGIN(3, 3);
7232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7233 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7234 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
7235 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7236 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7237 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
7238
7239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7241
7242 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7243 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7244 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7245
7246 IEM_MC_PREPARE_FPU_USAGE();
7247 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
7248 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
7249 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7250 IEM_MC_ELSE()
7251 IEM_MC_FPU_STACK_UNDERFLOW(0);
7252 IEM_MC_ENDIF();
7253 IEM_MC_ADVANCE_RIP_AND_FINISH();
7254
7255 IEM_MC_END();
7256}
7257
7258
/** Opcode 0xd8 !11/0 - fadd st0,m32r: ST0 := ST0 + [mem32real]. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1 - fmul st0,m32r: ST0 := ST0 * [mem32real]. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
7273
7274
/** Opcode 0xd8 !11/2 - fcom st0,m32r: compare ST0 with [mem32real], flags only.
 * Open-coded (not using iemOpHlpFpu_st0_m32r) because only the FSW is updated
 * and the memory operand address is recorded as the FPU data pointer. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7306
7307
/** Opcode 0xd8 !11/3 - fcomp st0,m32r: like fcom st0,m32r but pops ST0
 * afterwards (uses the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7339
7340
/** Opcode 0xd8 !11/4 - fsub st0,m32r: ST0 := ST0 - [mem32real]. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5 - fsubr st0,m32r: ST0 := [mem32real] - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6 - fdiv st0,m32r: ST0 := ST0 / [mem32real]. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7 - fdivr st0,m32r: ST0 := [mem32real] / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7371
7372
7373/**
7374 * @opcode 0xd8
7375 */
7376FNIEMOP_DEF(iemOp_EscF0)
7377{
7378 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7379 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
7380
7381 if (IEM_IS_MODRM_REG_MODE(bRm))
7382 {
7383 switch (IEM_GET_MODRM_REG_8(bRm))
7384 {
7385 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
7386 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
7387 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
7388 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
7389 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
7390 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
7391 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
7392 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
7393 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7394 }
7395 }
7396 else
7397 {
7398 switch (IEM_GET_MODRM_REG_8(bRm))
7399 {
7400 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
7401 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
7402 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
7403 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
7404 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
7405 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
7406 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
7407 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
7408 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7409 }
7410 }
7411}
7412
7413
/** Opcode 0xd9 /0 mem32real
 * Loads [mem32real], converts it to 80-bit and pushes it onto the FPU stack.
 * Requires the next stack slot (ST7 after push-check) to be empty, otherwise
 * push overflow is signalled.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* register that becomes the new top after the push */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7445
7446
/** Opcode 0xd9 !11/2 mem32real
 * Stores ST0 to [mem32real] (with conversion/rounding done by the assembly
 * worker).  The destination is mapped for writing before the stack check.  On
 * stack underflow a negative QNaN is written instead when masking (FCW.IM) is
 * enabled. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the worker produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7480
7481
/** Opcode 0xd9 !11/3
 * Same as iemOp_fst_m32r but pops ST0 afterwards (uses the _THEN_POP FSW and
 * underflow variants). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7515
7516
/** Opcode 0xd9 !11/4
 * Loads the FPU environment (14 or 28 bytes depending on operand size) from
 * memory; the heavy lifting is deferred to the C implementation worker. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7534
7535
7536/** Opcode 0xd9 !11/5 */
7537FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7538{
7539 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7540 IEM_MC_BEGIN(1, 1);
7541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7542 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7545 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7546 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7547 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7548 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7549 IEM_MC_END();
7550 return VINF_SUCCESS;
7551}
7552
7553
/** Opcode 0xd9 !11/6
 * Stores the FPU environment (14 or 28 bytes depending on operand size) to
 * memory; the no-wait form FNSTENV is what is implemented here (the mnemonic
 * stats id says "fstenv" — presumably historical; verify before renaming). */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7571
7572
/** Opcode 0xd9 !11/7
 * Stores the FPU control word to a 16-bit memory operand.  Simple enough to be
 * done inline (fetch FCW, store, advance RIP) without a CIMPL worker. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
7589
7590
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: does nothing except update FOP/FPUIP and raise pending FPU/device
 * exceptions like any other x87 instruction. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
7607
7608
/** Opcode 0xd9 11/0 stN
 * Pushes a copy of ST(i) onto the FPU stack; signals push underflow when the
 * source register is empty. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value); /* copy ST(i) as the "result" */
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7634
7635
/** Opcode 0xd9 11/3 stN
 * Exchanges ST0 and ST(i).  The empty-register case is handed off to a CIMPL
 * worker (iemCImpl_fxch_underflow); the normal path swaps inline, setting C1
 * in the recorded FSW. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1); /* old ST0 -> ST(i) */
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);                                   /* old ST(i) -> ST0 */
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7664
7665
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * Stores ST0 to ST(i) and pops.  The ST(i)==ST0 case is special-cased so the
 * register content is never copied onto itself. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST0 itself: just pop (clearing exception flags via a
           zero FSW merge) without touching the register value. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: copy ST0 into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7711
7712
7713/**
7714 * Common worker for FPU instructions working on ST0 and replaces it with the
7715 * result, i.e. unary operators.
7716 *
7717 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7718 */
7719FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
7720{
7721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7722
7723 IEM_MC_BEGIN(2, 1);
7724 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7725 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7726 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7727
7728 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7729 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7730 IEM_MC_PREPARE_FPU_USAGE();
7731 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7732 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
7733 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7734 IEM_MC_ELSE()
7735 IEM_MC_FPU_STACK_UNDERFLOW(0);
7736 IEM_MC_ENDIF();
7737 IEM_MC_ADVANCE_RIP_AND_FINISH();
7738
7739 IEM_MC_END();
7740}
7741
7742
/** Opcode 0xd9 0xe0 - fchs: ST0 := -ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1 - fabs: ST0 := |ST0|. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}


/** Opcode 0xd9 0xe4 - ftst: compare ST0 against 0.0, updating only the FSW. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7783
7784
/** Opcode 0xd9 0xe5 - fxam: classify ST0 into C0-C3.
 * Note that unlike most x87 ops this references the register unconditionally
 * (no NOT_EMPTY check) — FXAM must classify empty registers too. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7806
7807
7808/**
7809 * Common worker for FPU instructions pushing a constant onto the FPU stack.
7810 *
7811 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7812 */
7813FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
7814{
7815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7816
7817 IEM_MC_BEGIN(1, 1);
7818 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7819 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7820
7821 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7822 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7823 IEM_MC_PREPARE_FPU_USAGE();
7824 IEM_MC_IF_FPUREG_IS_EMPTY(7)
7825 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
7826 IEM_MC_PUSH_FPU_RESULT(FpuRes);
7827 IEM_MC_ELSE()
7828 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
7829 IEM_MC_ENDIF();
7830 IEM_MC_ADVANCE_RIP_AND_FINISH();
7831
7832 IEM_MC_END();
7833}
7834
7835
/** Opcode 0xd9 0xe8 - fld1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9 - fldl2t: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea - fldl2e: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb - fldpi: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec - fldlg2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed - fldln2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee - fldz: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
7888
7889
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs).  In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7903
7904
7905/**
7906 * Common worker for FPU instructions working on STn and ST0, storing the result
7907 * in STn, and popping the stack unless IE, DE or ZE was raised.
7908 *
7909 * @param bRm Mod R/M byte.
7910 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7911 */
7912FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7913{
7914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7915
7916 IEM_MC_BEGIN(3, 1);
7917 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7918 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7919 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7920 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7921
7922 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7923 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7924
7925 IEM_MC_PREPARE_FPU_USAGE();
7926 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0)
7927 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7928 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm));
7929 IEM_MC_ELSE()
7930 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm));
7931 IEM_MC_ENDIF();
7932 IEM_MC_ADVANCE_RIP_AND_FINISH();
7933
7934 IEM_MC_END();
7935}
7936
7937
/** Opcode 0xd9 0xf1 - fyl2x: ST1 := ST1 * log2(ST0), then pop.
 * (The literal 1 stands in for the ModR/M byte to select ST1.) */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7944
7945
7946/**
7947 * Common worker for FPU instructions working on ST0 and having two outputs, one
7948 * replacing ST0 and one pushed onto the stack.
7949 *
7950 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7951 */
7952FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
7953{
7954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7955
7956 IEM_MC_BEGIN(2, 1);
7957 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
7958 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
7959 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7960
7961 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7962 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7963 IEM_MC_PREPARE_FPU_USAGE();
7964 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7965 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
7966 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
7967 IEM_MC_ELSE()
7968 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
7969 IEM_MC_ENDIF();
7970 IEM_MC_ADVANCE_RIP_AND_FINISH();
7971
7972 IEM_MC_END();
7973}
7974
7975
/** Opcode 0xd9 0xf2 - fptan: replaces ST0 and pushes a second value. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3 - fpatan st1,st0: result to ST1, then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4 - fxtract: replaces ST0 and pushes a second value. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5 - fprem1 st0,st1: result replaces ST0, no pop. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
8006
8007
/** Opcode 0xd9 0xf6 - fdecstp: decrements the FPU stack top pointer. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0/C2/C3 (merged with a zero FSW) */

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xd9 0xf7 - fincstp: increments the FPU stack top pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0/C2/C3 (merged with a zero FSW) */

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8052
8053
/** Opcode 0xd9 0xf8 - fprem st0,st1: result replaces ST0, no pop. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9 - fyl2xp1 st1,st0: result to ST1, then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa - fsqrt st0: unary, result replaces ST0. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb - fsincos: replaces ST0 and pushes a second value. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc - frndint st0: unary, result replaces ST0. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd - fscale st0,st1: result replaces ST0, no pop. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe - fsin st0: unary, result replaces ST0. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff - fcos st0: unary, result replaces ST0. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
8116
8117
/** Used by iemOp_EscF1.
 * Dispatch table for the register forms 0xd9 0xe0..0xff, indexed by
 * (ModR/M byte - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
8154
8155
8156/**
8157 * @opcode 0xd9
8158 */
8159FNIEMOP_DEF(iemOp_EscF1)
8160{
8161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8162 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
8163
8164 if (IEM_IS_MODRM_REG_MODE(bRm))
8165 {
8166 switch (IEM_GET_MODRM_REG_8(bRm))
8167 {
8168 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
8169 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
8170 case 2:
8171 if (bRm == 0xd0)
8172 return FNIEMOP_CALL(iemOp_fnop);
8173 return IEMOP_RAISE_INVALID_OPCODE();
8174 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
8175 case 4:
8176 case 5:
8177 case 6:
8178 case 7:
8179 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
8180 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
8181 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8182 }
8183 }
8184 else
8185 {
8186 switch (IEM_GET_MODRM_REG_8(bRm))
8187 {
8188 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
8189 case 1: return IEMOP_RAISE_INVALID_OPCODE();
8190 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
8191 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
8192 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
8193 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
8194 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
8195 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
8196 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8197 }
8198 }
8199}
8200
8201
/** Opcode 0xda 11/0 - FCMOVB: copy ST(i) into ST(0) when CF is set.
 *  Raises stack underflow on ST(0) if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        /* Both ST(0) and ST(i) valid: move only when the condition (CF) holds. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8227
8228
/** Opcode 0xda 11/1 - FCMOVE: copy ST(i) into ST(0) when ZF is set.
 *  Raises stack underflow on ST(0) if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        /* Both ST(0) and ST(i) valid: move only when the condition (ZF) holds. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8254
8255
/** Opcode 0xda 11/2 - FCMOVBE: copy ST(i) into ST(0) when CF or ZF is set.
 *  Raises stack underflow on ST(0) if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        /* Both ST(0) and ST(i) valid: move when below-or-equal (CF | ZF). */
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8281
8282
/** Opcode 0xda 11/3 - FCMOVU: copy ST(i) into ST(0) when PF is set
 *  (unordered condition).  Underflows ST(0) if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        /* Both ST(0) and ST(i) valid: move only when the condition (PF) holds. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8308
8309
8310/**
8311 * Common worker for FPU instructions working on ST0 and ST1, only affecting
8312 * flags, and popping twice when done.
8313 *
8314 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8315 */
8316FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8317{
8318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8319
8320 IEM_MC_BEGIN(3, 1);
8321 IEM_MC_LOCAL(uint16_t, u16Fsw);
8322 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8323 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8324 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8325
8326 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8327 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8328
8329 IEM_MC_PREPARE_FPU_USAGE();
8330 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
8331 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8332 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
8333 IEM_MC_ELSE()
8334 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
8335 IEM_MC_ENDIF();
8336 IEM_MC_ADVANCE_RIP_AND_FINISH();
8337
8338 IEM_MC_END();
8339}
8340
8341
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare ST0 with ST1, pop both.
 *  Uses the flags-only double-pop worker with the FUCOM assembly helper. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8348
8349
8350/**
8351 * Common worker for FPU instructions working on ST0 and an m32i, and storing
8352 * the result in ST0.
8353 *
8354 * @param bRm Mod R/M byte.
8355 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8356 */
8357FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
8358{
8359 IEM_MC_BEGIN(3, 3);
8360 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8361 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8362 IEM_MC_LOCAL(int32_t, i32Val2);
8363 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8364 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8365 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8366
8367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8369
8370 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8371 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8372 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8373
8374 IEM_MC_PREPARE_FPU_USAGE();
8375 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
8376 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
8377 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8378 IEM_MC_ELSE()
8379 IEM_MC_FPU_STACK_UNDERFLOW(0);
8380 IEM_MC_ENDIF();
8381 IEM_MC_ADVANCE_RIP_AND_FINISH();
8382
8383 IEM_MC_END();
8384}
8385
8386
/** Opcode 0xda !11/0 - FIADD m32i: st0 = st0 + m32i, via the common
 *  ST(0)-by-m32i worker. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1 - FIMUL m32i: st0 = st0 * m32i, via the common
 *  ST(0)-by-m32i worker. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8401
8402
/** Opcode 0xda !11/2 - FICOM m32i: compare ST(0) with a 32-bit signed
 *  integer in memory, updating only FSW (no value stored, no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        /* ST(0) valid: compare and record FSW together with the memory operand info. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* NOTE(review): UINT8_MAX appears to mean 'no destination register' here - confirm against the helper. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8434
8435
/** Opcode 0xda !11/3 - FICOMP m32i: like FICOM m32i but pops ST(0) after
 *  the compare (note the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        /* ST(0) valid: compare, record FSW with memory operand info, then pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8467
8468
/** Opcode 0xda !11/4 - FISUB m32i: st0 = st0 - m32i, via the common
 *  ST(0)-by-m32i worker. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}


/** Opcode 0xda !11/5 - FISUBR m32i: st0 = m32i - st0 (reversed operands),
 *  via the common ST(0)-by-m32i worker. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}


/** Opcode 0xda !11/6 - FIDIV m32i: st0 = st0 / m32i, via the common
 *  ST(0)-by-m32i worker. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}


/** Opcode 0xda !11/7 - FIDIVR m32i: st0 = m32i / st0 (reversed operands),
 *  via the common ST(0)-by-m32i worker. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8499
8500
8501/**
8502 * @opcode 0xda
8503 */
8504FNIEMOP_DEF(iemOp_EscF2)
8505{
8506 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8507 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
8508 if (IEM_IS_MODRM_REG_MODE(bRm))
8509 {
8510 switch (IEM_GET_MODRM_REG_8(bRm))
8511 {
8512 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
8513 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
8514 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
8515 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
8516 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8517 case 5:
8518 if (bRm == 0xe9)
8519 return FNIEMOP_CALL(iemOp_fucompp);
8520 return IEMOP_RAISE_INVALID_OPCODE();
8521 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8522 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8523 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8524 }
8525 }
8526 else
8527 {
8528 switch (IEM_GET_MODRM_REG_8(bRm))
8529 {
8530 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
8531 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
8532 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
8533 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
8534 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
8535 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
8536 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
8537 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
8538 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8539 }
8540 }
8541}
8542
8543
/** Opcode 0xdb !11/0 - FILD m32i: push a 32-bit signed integer from memory
 *  onto the FPU stack (converted to r80).  Pushing requires the incoming
 *  top-of-stack slot, ST(7), to be free; otherwise overflow is signalled. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        /* ST(7) free: convert and push the result. */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8574
8575
/** Opcode 0xdb !11/1 - FISTTP m32i: store ST(0) to memory as a 32-bit
 *  integer using truncation (iemAImpl_fistt_*), then pop.  If ST(0) is
 *  empty and FCW.IM is set, the integer-indefinite value (INT32_MIN) is
 *  stored instead before raising stack underflow. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit conditionally on FSW (masked-exception handling), then pop. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8609
8610
/** Opcode 0xdb !11/2 - FIST m32i: store ST(0) to memory as a 32-bit
 *  integer (rounding per FCW, via iemAImpl_fist_*), no pop.  If ST(0) is
 *  empty and FCW.IM is set, the integer-indefinite value (INT32_MIN) is
 *  stored instead before raising stack underflow. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit conditionally on FSW (masked-exception handling); no pop. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8644
8645
/** Opcode 0xdb !11/3 - FISTP m32i: like FIST m32i but pops ST(0) after the
 *  store (note the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8679
8680
/** Opcode 0xdb !11/5 - FLD m80r: push an 80-bit real from memory onto the
 *  FPU stack.  Pushing requires ST(7) to be free; otherwise overflow is
 *  signalled. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        /* ST(7) free: load (r80-to-r80, may normalize/flag) and push. */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8711
8712
/** Opcode 0xdb !11/7 - FSTP m80r: store ST(0) to memory as an 80-bit real
 *  and pop.  If ST(0) is empty and FCW.IM is set, a negative QNaN is
 *  written instead before signalling stack underflow. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte store: uses the _EX mapping with an explicit alignment spec. */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        /* Commit conditionally on FSW (masked-exception handling), then pop. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8746
8747
/** Opcode 0xdb 11/0 - FCMOVNB: copy ST(i) into ST(0) when CF is clear.
 *  Raises stack underflow on ST(0) if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        /* Both ST(0) and ST(i) valid: move only when the condition (!CF) holds. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8773
8774
/** Opcode 0xdb 11/1 - FCMOVNE: copy ST(i) into ST(0) when ZF is clear.
 *  Raises stack underflow on ST(0) if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        /* Both ST(0) and ST(i) valid: move only when the condition (!ZF) holds. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8800
8801
/** Opcode 0xdb 11/2 - FCMOVNBE: copy ST(i) into ST(0) when both CF and ZF
 *  are clear.  Raises stack underflow on ST(0) if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        /* Both ST(0) and ST(i) valid: move when not-below-or-equal (!(CF | ZF)). */
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8827
8828
/** Opcode 0xdb 11/3 - FCMOVNU: copy ST(i) into ST(0) when PF is clear
 *  (not-unordered).  Raises stack underflow on ST(0) if either register is
 *  empty.  (The function/mnemonic spelling 'fcmovnnu' is a historical
 *  quirk of this file; the instruction is FCMOVNU.) */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        /* Both ST(0) and ST(i) valid: move only when the condition (!PF) holds. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8854
8855
/** Opcode 0xdb 0xe0 - FNENI: 8087 interrupt-enable; a no-op on later CPUs,
 *  so only the DNA check and RIP advance are emitted. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe1 - FNDISI: 8087 interrupt-disable; a no-op on later
 *  CPUs, so only the DNA check and RIP advance are emitted. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe2 - FNCLEX: clear FPU exception flags in FSW (the
 *  no-wait form, hence no pending-exception check beyond DNA). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe3 - FNINIT: reinitialize the FPU; deferred to the
 *  C implementation with exception checking disabled (no-wait form). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}


/** Opcode 0xdb 0xe4 - FNSETPM: 80287-only; ignored (no-op) here. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe5 - FRSTPM: 80287XL-only; raises \#UD (the no-op
 *  emulation is kept in the disabled branch for reference). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8931
8932
/** Opcode 0xdb 11/5 - FUCOMI: unordered compare ST0 with ST(i), setting
 *  EFLAGS; deferred to the shared fcomi/fucomi C implementation, no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}


/** Opcode 0xdb 11/6 - FCOMI: ordered compare ST0 with ST(i), setting
 *  EFLAGS; deferred to the shared fcomi/fucomi C implementation, no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8947
8948
8949/**
8950 * @opcode 0xdb
8951 */
8952FNIEMOP_DEF(iemOp_EscF3)
8953{
8954 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8955 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
8956 if (IEM_IS_MODRM_REG_MODE(bRm))
8957 {
8958 switch (IEM_GET_MODRM_REG_8(bRm))
8959 {
8960 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
8961 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
8962 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
8963 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
8964 case 4:
8965 switch (bRm)
8966 {
8967 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
8968 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
8969 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
8970 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
8971 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
8972 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
8973 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
8974 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
8975 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8976 }
8977 break;
8978 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
8979 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
8980 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8981 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8982 }
8983 }
8984 else
8985 {
8986 switch (IEM_GET_MODRM_REG_8(bRm))
8987 {
8988 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
8989 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
8990 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
8991 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
8992 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8993 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
8994 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8995 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
8996 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8997 }
8998 }
8999}
9000
9001
9002/**
9003 * Common worker for FPU instructions working on STn and ST0, and storing the
9004 * result in STn unless IE, DE or ZE was raised.
9005 *
9006 * @param bRm Mod R/M byte.
9007 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9008 */
9009FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9010{
9011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9012
9013 IEM_MC_BEGIN(3, 1);
9014 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9015 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9016 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9017 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9018
9019 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9020 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9021
9022 IEM_MC_PREPARE_FPU_USAGE();
9023 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0)
9024 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9025 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
9026 IEM_MC_ELSE()
9027 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
9028 IEM_MC_ENDIF();
9029 IEM_MC_ADVANCE_RIP_AND_FINISH();
9030
9031 IEM_MC_END();
9032}
9033
9034
/** Opcode 0xdc 11/0 - FADD ST(i),ST0, via the common ST(i)-by-ST(0) worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xdc 11/1 - FMUL ST(i),ST0, via the common ST(i)-by-ST(0) worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xdc 11/4 - FSUBR ST(i),ST0, via the common ST(i)-by-ST(0) worker. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xdc 11/5 - FSUB ST(i),ST0, via the common ST(i)-by-ST(0) worker. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xdc 11/6 - FDIVR ST(i),ST0, via the common ST(i)-by-ST(0) worker. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xdc 11/7 - FDIV ST(i),ST0, via the common ST(i)-by-ST(0) worker. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
9081
9082
9083/**
9084 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
9085 * memory operand, and storing the result in ST0.
9086 *
9087 * @param bRm Mod R/M byte.
9088 * @param pfnImpl Pointer to the instruction implementation (assembly).
9089 */
9090FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
9091{
9092 IEM_MC_BEGIN(3, 3);
9093 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9094 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9095 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
9096 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9097 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
9098 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
9099
9100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9102 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9103 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9104
9105 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9106 IEM_MC_PREPARE_FPU_USAGE();
9107 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
9108 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
9109 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9110 IEM_MC_ELSE()
9111 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9112 IEM_MC_ENDIF();
9113 IEM_MC_ADVANCE_RIP_AND_FINISH();
9114
9115 IEM_MC_END();
9116}
9117
9118
/** Opcode 0xdc !11/0.
 * FADD ST0,m64r - add a 64-bit memory operand to ST0 via the common worker. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
9125
9126
/** Opcode 0xdc !11/1.
 * FMUL ST0,m64r - multiply ST0 by a 64-bit memory operand via the common worker. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
9133
9134
/** Opcode 0xdc !11/2.
 * FCOM ST0,m64r - compare ST0 with a 64-bit floating point memory operand;
 * only FSW is updated, no stack register is written. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST0 empty: record stack underflow.  UINT8_MAX apparently means "no
           destination register" here, unlike the 0 the ST0-storing workers pass. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9166
9167
/** Opcode 0xdc !11/3.
 * FCOMP ST0,m64r - like FCOM ST0,m64r but pops the register stack afterwards
 * (the _THEN_POP update/underflow variants below). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        /* Same comparison worker as FCOM; only the FSW update pops the stack. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9199
9200
/** Opcode 0xdc !11/4.
 * FSUB ST0,m64r - subtract a 64-bit memory operand from ST0 via the common worker. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
9207
9208
/** Opcode 0xdc !11/5.
 * FSUBR ST0,m64r - reverse subtract with a 64-bit memory operand via the common worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
9215
9216
/** Opcode 0xdc !11/6.
 * FDIV ST0,m64r - divide ST0 by a 64-bit memory operand via the common worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
9223
9224
/** Opcode 0xdc !11/7.
 * FDIVR ST0,m64r - reverse divide with a 64-bit memory operand via the common worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
9231
9232
/**
 * @opcode 0xdc
 *
 * FPU escape 0xdc: dispatches on the ModR/M byte.  Register forms operate on
 * ST(i),ST0; memory forms operate on ST0 and an m64 real operand.  Note that
 * the sub/div register forms (/4../7) are reversed relative to the memory
 * forms, per the x87 encoding (e.g. reg /4 is FSUBR while mem /4 is FSUB).
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Latch the FPU opcode (FOP): low 3 bits of the escape byte + the ModR/M byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9271
9272
/** Opcode 0xdd !11/0.
 * FLD m64r - convert a 64-bit real memory operand to 80-bit and push it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to the current top) is where the push will land;
       it must be empty or we signal a stack overflow instead. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9303
9304
/** Opcode 0xdd !11/1.
 * FISTTP m64i (SSE3) - store ST0 to memory as a 64-bit integer with
 * truncation, then pop.  On an empty ST0, stores the integer-indefinite
 * value (INT64_MIN) if the invalid-operation exception is masked. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before doing the conversion. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        /* The FOR_FPU_STORE commit variant takes the resulting FSW into account. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9338
9339
/** Opcode 0xdd !11/2.
 * FST m64r - store ST0 to memory as a 64-bit real (no pop).  On an empty
 * ST0, stores negative QNaN if the invalid-operation exception is masked. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9373
9374
9375
9376
/** Opcode 0xdd !11/3.
 * FSTP m64r - like FST m64r, but pops the register stack afterwards
 * (the _THEN_POP update/underflow variants). */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9410
9411
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - restore the full FPU state from memory; the heavy
 * lifting is deferred to the iemCImpl_frstor C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    /* NOTE(review): IEM_MC_CALL_CIMPL_3 presumably returns, making this
       unreachable and only here for the compiler -- confirm against the macro. */
    return VINF_SUCCESS;
}
9429
9430
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - save the full FPU state to memory without checking
 * for pending exceptions; deferred to the iemCImpl_fnsave C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    /* NOTE(review): presumably unreachable, see iemOp_frstor. */
    return VINF_SUCCESS;

}
9449
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to memory.  Being a "no-wait" form,
 * only \#NM is checked; there is deliberately no IEM_MC_MAYBE_RAISE_FPU_XCPT
 * here. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
9473
9474
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark the given stack register as empty (tag it free) without
 * popping or changing TOP. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9495
9496
/** Opcode 0xdd 11/2.
 * FST ST(i) - copy ST0 into ST(i) without popping.  Implemented by reading
 * ST0 and storing it as an FPU result into the destination register. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST0 value in a result with a zero FSW delta and store it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9520
9521
/** Opcode 0xdd 11/4.
 * FUCOM ST0,ST(i) - unordered compare, FSW only; defers to the no-store
 * st0,stN comparison worker. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
9528
9529
/** Opcode 0xdd 11/5.
 * FUCOMP ST0,ST(i) - unordered compare and pop; defers to the popping
 * no-store comparison worker. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9536
9537
/**
 * @opcode 0xdd
 *
 * FPU escape 0xdd: register forms are stack management and unordered
 * compares; memory forms are m64 real loads/stores plus FRSTOR/FNSAVE/FNSTSW.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Latch the FPU opcode (FOP): low 3 bits of the escape byte + the ModR/M byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9576
9577
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST0 - add and pop; defers to the popping stN,st0 worker. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
9584
9585
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST0 - multiply and pop; defers to the popping stN,st0 worker. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
9592
9593
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST0 with ST1 and pop both; defers to the double-popping
 * no-store comparison worker. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
9600
9601
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST0 - reverse subtract and pop; defers to the popping
 * stN,st0 worker. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
9608
9609
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST0 - subtract and pop; defers to the popping stN,st0 worker. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
9616
9617
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST0 - reverse divide and pop; defers to the popping
 * stN,st0 worker. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
9624
9625
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST0 - divide and pop; defers to the popping stN,st0 worker. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9632
9633
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * On an empty ST0 the arithmetic is skipped and a stack underflow is recorded
 * for register 0 instead.
 *
 * @param bRm Mod R/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    /* Decode the effective address first, then check for #NM / pending #MF. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9669
9670
/** Opcode 0xde !11/0.
 * FIADD m16i - add a 16-bit signed integer memory operand to ST0 via the
 * common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
9677
9678
/** Opcode 0xde !11/1.
 * FIMUL m16i - multiply ST0 by a 16-bit signed integer memory operand via
 * the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9685
9686
/** Opcode 0xde !11/2.
 * FICOM ST0,m16i - compare ST0 with a 16-bit signed integer memory operand;
 * only FSW is updated, no stack register is written. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9718
9719
/** Opcode 0xde !11/3.
 * FICOMP ST0,m16i - like FICOM ST0,m16i but pops the register stack
 * afterwards (the _THEN_POP update/underflow variants). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        /* Same comparison worker as FICOM; only the FSW update pops the stack. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9751
9752
/** Opcode 0xde !11/4.
 * FISUB m16i - subtract a 16-bit signed integer memory operand from ST0 via
 * the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
9759
9760
/** Opcode 0xde !11/5.
 * FISUBR m16i - reverse subtract with a 16-bit signed integer memory operand
 * via the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
9767
9768
/** Opcode 0xde !11/6.
 * FIDIV m16i - divide ST0 by a 16-bit signed integer memory operand via the
 * common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
9775
9776
/** Opcode 0xde !11/7.
 * FIDIVR m16i - reverse divide with a 16-bit signed integer memory operand
 * via the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9783
9784
/**
 * @opcode 0xde
 *
 * FPU escape 0xde: register forms are the popping arithmetics (FADDP etc.),
 * with FCOMPP squatting on the single encoding 0xde 0xd9 inside /3; memory
 * forms operate on ST0 and an m16 signed integer operand.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Latch the FPU opcode (FOP): low 3 bits of the escape byte + the ModR/M byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9825
9826
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp: frees the
 * given stack register and then increments TOP (i.e. pops without storing). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9847
9848
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - store the FPU status word in AX.  No-wait form: only \#NM is
 * checked, no pending-\#MF check. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9864
9865
/** Opcode 0xdf 11/5.
 * FUCOMIP ST0,ST(i) - unordered compare into EFLAGS, then pop.
 * NOTE(review): this passes the same iemAImpl_fcomi_r80_by_r80 worker as
 * FCOMIP below, although FUCOMI is documented to differ from FCOMI for QNaN
 * operands (no \#IA) -- confirm the worker handles both or that this is a
 * known simplification. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9872
9873
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i) - ordered compare into EFLAGS, then pop; deferred to the
 * shared fcomi/fucomi C implementation with fPop=true. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9880
9881
/** Opcode 0xdf !11/0.
 * FILD m16i - convert a 16-bit signed integer memory operand to 80-bit real
 * and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to the current top) is the push target; it must
       be empty or we signal a stack overflow instead. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9912
9913
/** Opcode 0xdf !11/1.
 * FISTTP m16i (SSE3) - store ST0 to memory as a 16-bit integer with
 * truncation, then pop.  On an empty ST0, stores the integer-indefinite
 * value (INT16_MIN) if the invalid-operation exception is masked. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9947
9948
/** Opcode 0xdf !11/2.
 * FIST m16i - store ST0 to memory as a 16-bit integer using the current
 * rounding mode (no pop).  On an empty ST0, stores integer-indefinite
 * (INT16_MIN) if the invalid-operation exception is masked. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9982
9983
/** Opcode 0xdf !11/3.
 * FISTP m16i - like FIST m16i, but pops the register stack afterwards
 * (the _THEN_POP update/underflow variants). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10017
10018
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - convert an 80-bit packed BCD memory operand to 80-bit real
 * and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to the current top) is the push target; it must
       be empty or we signal a stack overflow instead. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10049
10050
/** Opcode 0xdf !11/5.
 *
 * fild m64i: Loads a signed 64-bit integer from memory, converts it to an
 * 80-bit real and pushes it onto the FPU stack.  If the register that would
 * become the new top is not empty, the push-overflow path is taken.
 */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(int64_t,                 i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val,    i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the integer operand before touching any FPU state. */
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10081
10082
/** Opcode 0xdf !11/6.
 *
 * fbstp m80d: Stores ST(0) to memory as an 80-bit packed BCD value and pops
 * the register stack.  On stack underflow with FCW.IM set, the BCD indefinite
 * value is stored instead.
 */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U,                pd80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the 10-byte destination for writing; the 7 here is the cbAlign
       argument of IEM_MC_MEM_MAP_EX (presumably an alignment restriction for
       the 80-bit access -- see the macro definition; TODO confirm). */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* ST(0) valid: convert to packed BCD, commit, update FSW and pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: write the BCD indefinite only if #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10116
10117
/** Opcode 0xdf !11/7.
 *
 * fistp m64i: Stores ST(0) to memory as a signed 64-bit integer and pops the
 * register stack.  On stack underflow with FCW.IM set, the 64-bit integer
 * indefinite value (INT64_MIN) is stored instead.
 */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,                 pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before preparing FPU usage. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* ST(0) valid: convert, commit the store, then update FSW and pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: write the integer indefinite only if #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10151
10152
/**
 * @opcode 0xdf
 *
 * FPU escape opcode 0xdf decoder: fetches the ModR/M byte and dispatches on
 * its reg field, with separate tables for register-form (mod=3) and
 * memory-form encodings.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register forms; several slots are reserved and mapped to the
           de-facto behavior documented in the per-case comments. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,  bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN,  bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,  bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* FNSTSW AX is the only valid /4 register encoding (df e0). */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN,  bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i,   bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i,  bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i,   bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10192
10193
/**
 * @opcode 0xe0
 *
 * loopne/loopnz Jb: Decrements CX/ECX/RCX (width selected by the effective
 * address size) and takes the short relative jump when the decremented
 * counter is non-zero AND EFLAGS.ZF is clear; otherwise falls through to the
 * next instruction.  The counter decrement does not go through the normal
 * flag-updating ALU path.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10242
10243
/**
 * @opcode 0xe1
 *
 * loope/loopz Jb: Decrements CX/ECX/RCX (width selected by the effective
 * address size) and takes the short relative jump when the decremented
 * counter is non-zero AND EFLAGS.ZF is set; otherwise falls through.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10292
10293
/**
 * @opcode 0xe2
 *
 * loop Jb: Decrements CX/ECX/RCX (width selected by the effective address
 * size) and takes the short relative jump while the decremented counter is
 * non-zero.  Contains a logging-build shortcut for the tight "LOOP $-2"
 * stall idiom (see the NB comment below).
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* Only when verbose logging is on and the branch targets this very
       instruction (self-loop): zero the counter and fall through at once. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm))
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);

            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10380
10381
/**
 * @opcode 0xe3
 *
 * jcxz/jecxz/jrcxz Jb: Jumps short when the counter register (width selected
 * by the effective address size) is zero.  Note the inverted branch shape:
 * the IF tests "counter non-zero" and advances, the ELSE takes the jump.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10427
10428
/** Opcode 0xe4
 *
 * in AL,Ib: Reads one byte from the I/O port given by the imm8 into AL.
 * Defers to the iemCImpl_in C implementation (fImm=true, 1 byte). */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, 1);
}
10437
10438
/** Opcode 0xe5
 *
 * in eAX,Ib: Reads a word or dword (per the effective operand size) from the
 * I/O port given by the imm8 into AX/EAX.  Defers to iemCImpl_in. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10447
10448
/** Opcode 0xe6
 *
 * out Ib,AL: Writes AL to the I/O port given by the imm8.  Defers to the
 * iemCImpl_out C implementation (fImm=true, 1 byte). */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, 1);
}
10457
10458
/** Opcode 0xe7
 *
 * out Ib,eAX: Writes AX/EAX (per the effective operand size) to the I/O port
 * given by the imm8.  Defers to iemCImpl_out. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10467
10468
/**
 * @opcode 0xe8
 *
 * call Jv: Near relative call.  The immediate displacement is fetched per the
 * effective operand size (the 64-bit case fetches a sign-extended 32-bit
 * displacement) and the push/branch work is deferred to the per-width
 * iemCImpl_call_rel_* C implementations.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* Displacement is imm32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10499
10500
/**
 * @opcode 0xe9
 *
 * jmp Jv: Near relative jump with a 16- or 32-bit displacement; in 64-bit
 * mode the 32-bit form is used (sign-extended displacement).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            /* NOTE(review): this return looks unreachable since the
               *_AND_FINISH macro returns the status itself (bugref:9898
               rework) -- confirm before cleaning up. */
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10532
10533
/**
 * @opcode 0xea
 *
 * jmp Ap: Direct far jump with an inline seg:off pointer.  Invalid in 64-bit
 * mode (IEMOP_HLP_NO_64BIT).  The offset is fetched as 16 or 32 bits per the
 * effective operand size (zero-extended to 32 bits in the 16-bit case), then
 * the selector, and everything is handed to iemCImpl_FarJmp.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
10552
10553
/**
 * @opcode 0xeb
 *
 * jmp Jb: Near short jump with a sign-extended 8-bit displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
    /* NOTE(review): this return looks unreachable since the *_AND_FINISH
       macro returns the status itself (bugref:9898 rework) -- confirm
       before cleaning up. */
    return VINF_SUCCESS;
}
10569
10570
/** Opcode 0xec
 *
 * in AL,DX: Reads one byte from the I/O port in DX into AL.  Defers to the
 * iemCImpl_in_eAX_DX C implementation. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
10578
10579
/** Opcode 0xed
 *
 * in eAX,DX: Reads a word or dword (per the effective operand size) from the
 * I/O port in DX into AX/EAX.  Defers to iemCImpl_in_eAX_DX. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10587
10588
/** Opcode 0xee
 *
 * out DX,AL: Writes AL to the I/O port in DX.  Defers to the
 * iemCImpl_out_DX_eAX C implementation. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
10596
10597
/** Opcode 0xef
 *
 * out DX,eAX: Writes AX/EAX (per the effective operand size) to the I/O port
 * in DX.  Defers to iemCImpl_out_DX_eAX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10605
10606
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records IEM_OP_PRF_LOCK (unless the fDisregardLock config flag
 * tells us to ignore LOCK prefixes entirely), then fetches and dispatches the
 * next opcode byte through the one-byte opcode map.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    if (!pVCpu->iem.s.fDisregardLock)
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10619
10620
/**
 * @opcode 0xf1
 *
 * int1/icebp: Raises a \#DB by deferring to iemCImpl_int with X86_XCPT_DB and
 * the IEMINT_INT1 interrupt source.
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
10634
10635
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: clears any previously seen REPZ, records
 * IEM_OP_PRF_REPNZ and selects prefix-table index 3, then fetches and
 * dispatches the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10653
10654
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix: clears any previously seen REPNZ, records
 * IEM_OP_PRF_REPZ and selects prefix-table index 2, then fetches and
 * dispatches the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10672
10673
/**
 * @opcode 0xf4
 *
 * hlt: Defers to the iemCImpl_hlt C implementation (privilege checking and
 * actual halting happen there).
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
10683
10684
/**
 * @opcode 0xf5
 *
 * cmc: Complements (flips) EFLAGS.CF.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10697
10698
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Register operands are modified in place via a GREG reference; memory
 * operands are mapped read-write and, when a LOCK prefix is present, the
 * locked variant of the worker is used.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (supplies the normal and
 *                  locked 8-bit workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst, 0);
        IEM_MC_ARG(uint32_t *,  pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK prefix selects the atomic worker variant. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10741
10742
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are forwarded to the shared iemOpCommonUnaryGReg worker;
 * memory operands are handled here per effective operand size, mapped
 * read-write, with the locked worker variant used when a LOCK prefix is
 * present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (supplies the normal and
 *                  locked 16/32/64-bit workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10821
10822
/** Opcode 0xf6 /0.
 *
 * test Eb,Ib: ANDs the byte operand with the imm8 and updates EFLAGS without
 * writing the result back (the memory operand is therefore mapped read-only).
 * AF is declared undefined for verification purposes. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* Effective address first (1 = immediate byte still to come). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Read-only mapping: test does not write the destination. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10868
10869
/** Opcode 0xf7 /0.
 *
 * test Ev,Iv: ANDs the word/dword/qword operand with the immediate and
 * updates EFLAGS without writing the result back (memory operands are mapped
 * read-only; the 64-bit form uses a sign-extended imm32).  AF is declared
 * undefined for verification purposes. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint16_t,        u16Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* Effective address first (2 = immediate word still to come). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint32_t,        u32Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint64_t,        u64Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* 4 = size of the imm32 that gets sign-extended to 64 bits. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11009
11010
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the byte-sized mul/imul/div/idiv forms.  The worker
 * operates on AX (implicit 16-bit accumulator for 8-bit operations) and
 * returns non-zero on failure, in which case a \#DE (divide error) is
 * raised instead of advancing RIP. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; anything else raises #DE. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
11061
11062
11063/** Opcode 0xf7 /4, /5, /6 and /7. */
11064FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
11065{
11066 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11067
11068 if (IEM_IS_MODRM_REG_MODE(bRm))
11069 {
11070 /* register access */
11071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11072 switch (pVCpu->iem.s.enmEffOpSize)
11073 {
11074 case IEMMODE_16BIT:
11075 {
11076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11077 IEM_MC_BEGIN(4, 1);
11078 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11079 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11080 IEM_MC_ARG(uint16_t, u16Value, 2);
11081 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11082 IEM_MC_LOCAL(int32_t, rc);
11083
11084 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11085 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11086 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11087 IEM_MC_REF_EFLAGS(pEFlags);
11088 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11089 IEM_MC_IF_LOCAL_IS_Z(rc) {
11090 IEM_MC_ADVANCE_RIP_AND_FINISH();
11091 } IEM_MC_ELSE() {
11092 IEM_MC_RAISE_DIVIDE_ERROR();
11093 } IEM_MC_ENDIF();
11094
11095 IEM_MC_END();
11096 break;
11097 }
11098
11099 case IEMMODE_32BIT:
11100 {
11101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11102 IEM_MC_BEGIN(4, 1);
11103 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11104 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11105 IEM_MC_ARG(uint32_t, u32Value, 2);
11106 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11107 IEM_MC_LOCAL(int32_t, rc);
11108
11109 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11110 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11111 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11112 IEM_MC_REF_EFLAGS(pEFlags);
11113 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11114 IEM_MC_IF_LOCAL_IS_Z(rc) {
11115 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11116 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11117 IEM_MC_ADVANCE_RIP_AND_FINISH();
11118 } IEM_MC_ELSE() {
11119 IEM_MC_RAISE_DIVIDE_ERROR();
11120 } IEM_MC_ENDIF();
11121
11122 IEM_MC_END();
11123 break;
11124 }
11125
11126 case IEMMODE_64BIT:
11127 {
11128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11129 IEM_MC_BEGIN(4, 1);
11130 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11131 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11132 IEM_MC_ARG(uint64_t, u64Value, 2);
11133 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11134 IEM_MC_LOCAL(int32_t, rc);
11135
11136 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11137 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11138 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11139 IEM_MC_REF_EFLAGS(pEFlags);
11140 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11141 IEM_MC_IF_LOCAL_IS_Z(rc) {
11142 IEM_MC_ADVANCE_RIP_AND_FINISH();
11143 } IEM_MC_ELSE() {
11144 IEM_MC_RAISE_DIVIDE_ERROR();
11145 } IEM_MC_ENDIF();
11146
11147 IEM_MC_END();
11148 break;
11149 }
11150
11151 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11152 }
11153 }
11154 else
11155 {
11156 /* memory access. */
11157 switch (pVCpu->iem.s.enmEffOpSize)
11158 {
11159 case IEMMODE_16BIT:
11160 {
11161 IEM_MC_BEGIN(4, 2);
11162 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11163 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11164 IEM_MC_ARG(uint16_t, u16Value, 2);
11165 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11166 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11167 IEM_MC_LOCAL(int32_t, rc);
11168
11169 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11171 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11172 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11173 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11174 IEM_MC_REF_EFLAGS(pEFlags);
11175 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11176 IEM_MC_IF_LOCAL_IS_Z(rc) {
11177 IEM_MC_ADVANCE_RIP_AND_FINISH();
11178 } IEM_MC_ELSE() {
11179 IEM_MC_RAISE_DIVIDE_ERROR();
11180 } IEM_MC_ENDIF();
11181
11182 IEM_MC_END();
11183 break;
11184 }
11185
11186 case IEMMODE_32BIT:
11187 {
11188 IEM_MC_BEGIN(4, 2);
11189 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11190 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11191 IEM_MC_ARG(uint32_t, u32Value, 2);
11192 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11193 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11194 IEM_MC_LOCAL(int32_t, rc);
11195
11196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11198 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11199 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11200 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11201 IEM_MC_REF_EFLAGS(pEFlags);
11202 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11203 IEM_MC_IF_LOCAL_IS_Z(rc) {
11204 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11205 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11206 IEM_MC_ADVANCE_RIP_AND_FINISH();
11207 } IEM_MC_ELSE() {
11208 IEM_MC_RAISE_DIVIDE_ERROR();
11209 } IEM_MC_ENDIF();
11210
11211 IEM_MC_END();
11212 break;
11213 }
11214
11215 case IEMMODE_64BIT:
11216 {
11217 IEM_MC_BEGIN(4, 2);
11218 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11219 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11220 IEM_MC_ARG(uint64_t, u64Value, 2);
11221 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11222 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11223 IEM_MC_LOCAL(int32_t, rc);
11224
11225 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11227 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11228 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11229 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11230 IEM_MC_REF_EFLAGS(pEFlags);
11231 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11232 IEM_MC_IF_LOCAL_IS_Z(rc) {
11233 IEM_MC_ADVANCE_RIP_AND_FINISH();
11234 } IEM_MC_ELSE() {
11235 IEM_MC_RAISE_DIVIDE_ERROR();
11236 } IEM_MC_ENDIF();
11237
11238 IEM_MC_END();
11239 break;
11240 }
11241
11242 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11243 }
11244 }
11245}
11246
/**
 * @opcode 0xf6
 *
 * Group 3 with a byte-sized r/m operand: dispatches on ModR/M.reg to
 * TEST (/0), NOT (/2), NEG (/3), MUL (/4), IMUL (/5), DIV (/6), IDIV (/7).
 * /1 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            /* DIV/IDIV additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11285
11286
/**
 * @opcode 0xf7
 *
 * Group 3 with a word/dword/qword r/m operand: dispatches on ModR/M.reg to
 * TEST (/0), NOT (/2), NEG (/3), MUL (/4), IMUL (/5), DIV (/6), IDIV (/7).
 * /1 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            /* DIV/IDIV additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11325
11326
/**
 * @opcode 0xf8
 *
 * CLC - clear the carry flag.  No operands, no exceptions beyond the
 * lock-prefix check.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11339
11340
/**
 * @opcode 0xf9
 *
 * STC - set the carry flag.  No operands, no exceptions beyond the
 * lock-prefix check.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11353
11354
/**
 * @opcode 0xfa
 *
 * CLI - clear the interrupt flag.  Deferred to a C implementation because
 * of the IOPL/VME privilege checks involved.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
11364
11365
/**
 * @opcode 0xfb
 *
 * STI - set the interrupt flag.  Deferred to a C implementation because of
 * the privilege checks and interrupt-shadow handling involved.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11372
11373
/**
 * @opcode 0xfc
 *
 * CLD - clear the direction flag.  No operands, no exceptions beyond the
 * lock-prefix check.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11386
11387
/**
 * @opcode 0xfd
 *
 * STD - set the direction flag.  No operands, no exceptions beyond the
 * lock-prefix check.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11400
11401
11402/**
11403 * @opcode 0xfe
11404 */
11405FNIEMOP_DEF(iemOp_Grp4)
11406{
11407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11408 switch (IEM_GET_MODRM_REG_8(bRm))
11409 {
11410 case 0:
11411 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
11412 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
11413 case 1:
11414 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
11415 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
11416 default:
11417 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
11418 return IEMOP_RAISE_INVALID_OPCODE();
11419 }
11420}
11421
11422
/**
 * Opcode 0xff /2 - near indirect call.
 *
 * The call target comes either from a general register or from memory,
 * depending on the ModR/M mode; the actual push-return-address-and-branch
 * work is done by the operand-size specific iemCImpl_call_16/32/64 worker.
 * In 64-bit mode the operand size defaults to 64 bits.
 *
 * @param   bRm     The ModR/M byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11507
11508typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11509
11510FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11511{
11512 /* Registers? How?? */
11513 if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(bRm)))
11514 { /* likely */ }
11515 else
11516 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11517
11518 /* Far pointer loaded from memory. */
11519 switch (pVCpu->iem.s.enmEffOpSize)
11520 {
11521 case IEMMODE_16BIT:
11522 IEM_MC_BEGIN(3, 1);
11523 IEM_MC_ARG(uint16_t, u16Sel, 0);
11524 IEM_MC_ARG(uint16_t, offSeg, 1);
11525 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11529 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11530 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11531 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11532 IEM_MC_END();
11533 return VINF_SUCCESS;
11534
11535 case IEMMODE_64BIT:
11536 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
11537 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11538 * and call far qword [rsp] encodings. */
11539 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11540 {
11541 IEM_MC_BEGIN(3, 1);
11542 IEM_MC_ARG(uint16_t, u16Sel, 0);
11543 IEM_MC_ARG(uint64_t, offSeg, 1);
11544 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11545 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11546 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11548 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11549 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11550 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11551 IEM_MC_END();
11552 return VINF_SUCCESS;
11553 }
11554 /* AMD falls thru. */
11555 RT_FALL_THRU();
11556
11557 case IEMMODE_32BIT:
11558 IEM_MC_BEGIN(3, 1);
11559 IEM_MC_ARG(uint16_t, u16Sel, 0);
11560 IEM_MC_ARG(uint32_t, offSeg, 1);
11561 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11565 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11566 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11567 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11568 IEM_MC_END();
11569 return VINF_SUCCESS;
11570
11571 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11572 }
11573}
11574
11575
/**
 * Opcode 0xff /3 - far indirect call.
 *
 * Thin wrapper that forwards to the shared far-branch worker with the
 * callf C implementation.
 *
 * @param   bRm     The ModR/M byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
11585
11586
/**
 * Opcode 0xff /4 - near indirect jump.
 *
 * The jump target comes either from a general register or from memory,
 * depending on the ModR/M mode; RIP is updated directly via the
 * IEM_MC_SET_RIP_*_AND_FINISH macros.  In 64-bit mode the operand size
 * defaults to 64 bits.
 *
 * @param   bRm     The ModR/M byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11671
11672
/**
 * Opcode 0xff /5 - far indirect jump.
 *
 * Thin wrapper that forwards to the shared far-branch worker with the
 * far-jump C implementation.
 *
 * @param   bRm     The ModR/M byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
11682
11683
/**
 * Opcode 0xff /6 - push Ev.
 *
 * Register operands are delegated to the common push-register worker;
 * memory operands are fetched here and pushed with the operand-size
 * specific IEM_MC_PUSH_* macro.  In 64-bit mode the operand size defaults
 * to 64 bits.
 *
 * @param   bRm     The ModR/M byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11739
11740
/**
 * @opcode 0xff
 *
 * Group 5 dispatcher: INC Ev (/0), DEC Ev (/1), near call (/2), far call
 * (/3), near jmp (/4), far jmp (/5), push Ev (/6).  /7 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* IEM_GET_MODRM_REG_8 yields 0..7, so this is unreachable. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
11771
11772
11773
/**
 * The one-byte opcode decoder dispatch table.
 *
 * Indexed by the first opcode byte; each entry is the FNIEMOP decoder for
 * that opcode (instruction, prefix, opcode group, or escape byte).
 * Declared extern at the top of this file.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
11841
11842
11843/** @} */
11844
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette