VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 98122

Last change on this file since 98122 was 98103, checked in by vboxsync, 22 months ago

Copyright year updates by scm.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 396.5 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 98103 2023-01-17 14:15:46Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
/** One-byte opcode dispatch table, indexed by the opcode byte (0x00..0xff). */
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/* Instruction specification format - work in progress: */
60
/**
 * @opcode 0x00
 * @opmnemonic add
 * @op1 rm:Eb
 * @op2 reg:Gb
 * @opmaps one
 * @openc ModR/M
 * @opflmodify cf,pf,af,zf,sf,of
 * @ophints harmless ignores_op_sizes
 * @opstats add_Eb_Gb
 * @opgroup og_gen_arith_bin
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Decode Eb,Gb and dispatch via the common byte-sized r/m,reg helper with the ADD worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
82
83
/**
 * @opcode 0x01
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Word/dword/qword r/m,reg form; operand size selects the ADD worker. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
98
99
/**
 * @opcode 0x02
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register-destination byte form (Gb,Eb); no LOCK hint since the destination is a register. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
111
112
/**
 * @opcode 0x03
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register-destination word/dword/qword form (Gv,Ev). */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
124
125
/**
 * @opcode 0x04
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed AL,imm8 form; byte-sized, so operand-size prefixes are ignored. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
137
138
/**
 * @opcode 0x05
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Fixed rAX,imm form; immediate width follows the effective operand size. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
153
154
/**
 * @opcode 0x06
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    /* Invalid in 64-bit mode; otherwise push the ES selector via the shared helper. */
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
165
166
/**
 * @opcode 0x07
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    /* Invalid in 64-bit mode; loading a segment register is deferred to the C implementation. */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
178
179
/**
 * @opcode 0x08
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after OR; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
197
198
/**
 * @opcode 0x09
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after OR; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}
218
219
220/**
221 * @opcode 0x0a
222 * @opgroup og_gen_arith_bin
223 * @opflmodify cf,pf,af,zf,sf,of
224 * @opflundef af
225 * @opflclear of,cf
226 * @opcopytests iemOp_or_Eb_Gb
227 */
228FNIEMOP_DEF(iemOp_or_Gb_Eb)
229{
230 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
231 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
232 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
233}
234
235
/**
 * @opcode 0x0b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* AF is architecturally undefined after OR; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
250
251
/**
 * @opcode 0x0c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AF is architecturally undefined after OR; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
266
267
/**
 * @opcode 0x0d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* AF is architecturally undefined after OR; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
288
289
/**
 * @opcode 0x0e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
    /* Invalid in 64-bit mode; otherwise push the CS selector via the shared helper. */
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
300
301
302/**
303 * @opcode 0x0f
304 * @opmnemonic EscTwo0f
305 * @openc two0f
306 * @opdisenum OP_2B_ESC
307 * @ophints harmless
308 * @opgroup og_escapes
309 */
310FNIEMOP_DEF(iemOp_2byteEscape)
311{
312#ifdef VBOX_STRICT
313 /* Sanity check the table the first time around. */
314 static bool s_fTested = false;
315 if (RT_LIKELY(s_fTested)) { /* likely */ }
316 else
317 {
318 s_fTested = true;
319 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
320 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
321 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
322 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
323 }
324#endif
325
326 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
327 {
328 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
329 IEMOP_HLP_MIN_286();
330 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
331 }
332 /* @opdone */
333
334 /*
335 * On the 8086 this is a POP CS instruction.
336 * For the time being we don't specify this this.
337 */
338 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
339 IEMOP_HLP_NO_64BIT();
340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
341 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
342}
343
/**
 * @opcode 0x10
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Add-with-carry byte r/m,reg form; the ADC worker table consumes the incoming CF. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
360
361
/**
 * @opcode 0x11
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Word/dword/qword r/m,reg form with the ADC worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
378
379
/**
 * @opcode 0x12
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register-destination byte form (Gb,Eb). */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
392
393
/**
 * @opcode 0x13
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register-destination word/dword/qword form (Gv,Ev). */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
406
407
/**
 * @opcode 0x14
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed AL,imm8 form. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
420
421
/**
 * @opcode 0x15
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Fixed rAX,imm form; immediate width follows the effective operand size. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
434
435
/**
 * @opcode 0x16
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    /* Invalid in 64-bit mode; otherwise push the SS selector via the shared helper. */
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
445
446
/**
 * @opcode 0x17
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
/* NOTE(review): the @opgroup/@opfl tags above look copy-pasted from the SBB group;
   POP SS is a stack/segment operation (cf. og_stack_sreg on pop ES/DS) -- verify
   against the test-generator scripts before changing them. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Invalid in 64-bit mode; the segment load itself is deferred to the C implementation. */
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
460
461
/**
 * @opcode 0x18
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Subtract-with-borrow byte r/m,reg form; the SBB worker table consumes the incoming CF. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
473
474
/**
 * @opcode 0x19
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Word/dword/qword r/m,reg form with the SBB worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
486
487
/**
 * @opcode 0x1a
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register-destination byte form (Gb,Eb). */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
499
500
/**
 * @opcode 0x1b
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register-destination word/dword/qword form (Gv,Ev). */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
512
513
/**
 * @opcode 0x1c
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed AL,imm8 form. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
525
526
/**
 * @opcode 0x1d
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Fixed rAX,imm form; immediate width follows the effective operand size. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
538
539
/**
 * @opcode 0x1e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    /* Invalid in 64-bit mode; otherwise push the DS selector via the shared helper. */
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
550
551
/**
 * @opcode 0x1f
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Invalid in 64-bit mode; the segment load itself is deferred to the C implementation. */
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
563
564
/**
 * @opcode 0x20
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after AND; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
578
579
/**
 * @opcode 0x21
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after AND; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
593
594
/**
 * @opcode 0x22
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AF is architecturally undefined after AND; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
608
609
/**
 * @opcode 0x23
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* AF is architecturally undefined after AND; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
623
624
625/**
626 * @opcode 0x24
627 * @opgroup og_gen_arith_bin
628 * @opflmodify cf,pf,af,zf,sf,of
629 * @opflundef af
630 * @opflclear of,cf
631 */
632FNIEMOP_DEF(iemOp_and_Al_Ib)
633{
634 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
635 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
636 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
637}
638
639
/**
 * @opcode 0x25
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* AF is architecturally undefined after AND; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
653
654
/**
 * @opcode 0x26
 * @opmnemonic SEG
 * @op1 ES
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    /* Record the ES segment-override prefix and make ES the effective segment. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    /* A prefix does not end the instruction: fetch and dispatch the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
673
674
/**
 * @opcode 0x27
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is architecturally undefined after DAA; the decimal adjust itself is done in C. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
689
690
/**
 * @opcode 0x28
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Byte r/m,reg form with the SUB worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
701
702
/**
 * @opcode 0x29
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Word/dword/qword r/m,reg form with the SUB worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
713
714
/**
 * @opcode 0x2a
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register-destination byte form (Gb,Eb). */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
725
726
/**
 * @opcode 0x2b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register-destination word/dword/qword form (Gv,Ev). */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
737
738
/**
 * @opcode 0x2c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed AL,imm8 form. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
749
750
/**
 * @opcode 0x2d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Fixed rAX,imm form; immediate width follows the effective operand size. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
761
762
/**
 * @opcode 0x2e
 * @opmnemonic SEG
 * @op1 CS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    /* Record the CS segment-override prefix and make CS the effective segment. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    /* A prefix does not end the instruction: fetch and dispatch the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
781
782
/**
 * @opcode 0x2f
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is architecturally undefined after DAS; the decimal adjust itself is done in C. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
797
798
/**
 * @opcode 0x30
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after XOR; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
812
813
/**
 * @opcode 0x31
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after XOR; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
827
828
/**
 * @opcode 0x32
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AF is architecturally undefined after XOR; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
842
843
/**
 * @opcode 0x33
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* AF is architecturally undefined after XOR; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
857
858
/**
 * @opcode 0x34
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AF is architecturally undefined after XOR; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
872
873
874/**
875 * @opcode 0x35
876 * @opgroup og_gen_arith_bin
877 * @opflmodify cf,pf,af,zf,sf,of
878 * @opflundef af
879 * @opflclear of,cf
880 */
881FNIEMOP_DEF(iemOp_xor_eAX_Iz)
882{
883 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
884 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
885 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
886}
887
888
/**
 * @opcode 0x36
 * @opmnemonic SEG
 * @op1 SS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    /* Record the SS segment-override prefix and make SS the effective segment. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    /* A prefix does not end the instruction: fetch and dispatch the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
907
908
/**
 * @opcode 0x37
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef pf,zf,sf,of
 * @opgroup og_gen_arith_dec
 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is architecturally undefined after AAA; the ASCII adjust itself is done in C. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
}
956
957
/**
 * @opcode 0x38
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    /* CMP reuses the binary-op decoder; the CMP worker only updates EFLAGS. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
966
967
/**
 * @opcode 0x39
 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    /* CMP reuses the binary-op decoder; the CMP worker only updates EFLAGS. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
976
977
/**
 * @opcode 0x3a
 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    /* CMP reuses the binary-op decoder; the CMP worker only updates EFLAGS. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
986
987
/**
 * @opcode 0x3b
 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    /* CMP reuses the binary-op decoder; the CMP worker only updates EFLAGS. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
996
997
/**
 * @opcode 0x3c
 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    /* Fixed AL,imm8 compare; the CMP worker only updates EFLAGS. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
1006
1007
/**
 * @opcode 0x3d
 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    /* Fixed rAX,imm compare; immediate width follows the effective operand size. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
1016
1017
/**
 * @opcode 0x3e
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    /* Record the DS segment-override prefix and make DS the effective segment. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    /* A prefix does not end the instruction: fetch and dispatch the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1030
1031
1032/**
1033 * @opcode 0x3f
1034 * @opfltest af,cf
1035 * @opflmodify cf,pf,af,zf,sf,of
1036 * @opflundef pf,zf,sf,of
1037 * @opgroup og_gen_arith_dec
1038 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1039 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1040 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1041 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1042 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1043 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1044 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1045 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1046 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1047 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1048 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1049 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1050 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1051 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1052 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1053 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1054 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1055 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1056 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1057 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1058 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1059 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1060 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1061 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1062 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1063 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1064 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1065 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1066 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1067 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1068 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1069 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1070 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1071 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1072 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1073 */
1074FNIEMOP_DEF(iemOp_aas)
1075{
1076 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
1077 IEMOP_HLP_NO_64BIT();
1078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1079 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1080
1081 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
1082}
1083
1084
1085/**
1086 * Common 'inc/dec/not/neg register' helper.
1087 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    /* Applies the unary operation in pImpl (inc/dec/not/neg) to general register iReg
       at the current effective operand size. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit writes zero the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1130
1131
1132/**
1133 * @opcode 0x40
1134 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX; /* plain REX - no R/X/B/W bits set */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding after the prefix */
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
1152
1153
1154/**
1155 * @opcode 0x41
1156 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B: bit 3 of the base/opcode register number */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding after the prefix */
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
1175
1176
1177/**
1178 * @opcode 0x42
1179 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: bit 3 of the SIB index register number */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding after the prefix */
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
1198
1199
1200
1201/**
1202 * @opcode 0x43
1203 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B + REX.X both set for this prefix byte */
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding after the prefix */
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
1223
1224
1225/**
1226 * @opcode 0x44
1227 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R: bit 3 of the ModRM reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding after the prefix */
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
1246
1247
1248/**
1249 * @opcode 0x45
1250 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R + REX.B both set for this prefix byte */
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding after the prefix */
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
1270
1271
1272/**
1273 * @opcode 0x46
1274 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R + REX.X both set for this prefix byte */
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding after the prefix */
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
1294
1295
1296/**
1297 * @opcode 0x47
1298 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R + REX.B + REX.X all set for this prefix byte */
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding after the prefix */
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
1319
1320
1321/**
1322 * @opcode 0x48
1323 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding after the prefix */
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
1342
1343
1344/**
1345 * @opcode 0x49
1346 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B + REX.W set for this prefix byte */
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding after the prefix */
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
1366
1367
1368/**
1369 * @opcode 0x4a
1370 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X + REX.W set for this prefix byte */
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding after the prefix */
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
1390
1391
1392/**
1393 * @opcode 0x4b
1394 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B + REX.X + REX.W set for this prefix byte */
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding after the prefix */
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
1415
1416
1417/**
1418 * @opcode 0x4c
1419 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R + REX.W set for this prefix byte */
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding after the prefix */
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
1439
1440
1441/**
1442 * @opcode 0x4d
1443 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R + REX.B + REX.W set for this prefix byte */
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding after the prefix */
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
1464
1465
1466/**
1467 * @opcode 0x4e
1468 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R + REX.X + REX.W set for this prefix byte */
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding after the prefix */
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
1489
1490
1491/**
1492 * @opcode 0x4f
1493 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* all four REX payload bits set for this prefix byte */
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding after the prefix */
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
1515
1516
1517/**
1518 * Common 'push register' helper.
1519 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    /* Pushes general register iReg at the current effective operand size. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB; /* REX.B selects r8..r15 */
        /* In 64-bit mode push defaults to 64-bit; a 0x66 prefix selects 16-bit, 32-bit is not encodable. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1562
1563
1564/**
1565 * @opcode 0x50
1566 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* Delegates to the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
1572
1573
1574/**
1575 * @opcode 0x51
1576 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* Delegates to the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
1582
1583
1584/**
1585 * @opcode 0x52
1586 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* Delegates to the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
1592
1593
1594/**
1595 * @opcode 0x53
1596 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* Delegates to the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1602
1603
1604/**
1605 * @opcode 0x54
1606 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        /* 8086/8088 quirk: pushes the value of SP *after* the decrement, i.e. SP-2. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* 186 and later push the pre-decrement SP value, via the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
1622
1623
1624/**
1625 * @opcode 0x55
1626 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* Delegates to the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
1632
1633
1634/**
1635 * @opcode 0x56
1636 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* Delegates to the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
1642
1643
1644/**
1645 * @opcode 0x57
1646 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* Delegates to the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1652
1653
1654/**
1655 * Common 'pop register' helper.
1656 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    /* Pops the top of stack into general register iReg at the current effective operand size. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB; /* REX.B selects r8..r15 */
        /* In 64-bit mode pop defaults to 64-bit; a 0x66 prefix selects 16-bit, 32-bit is not encodable. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            /* 32-bit writes zero the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1700
1701
1702/**
1703 * @opcode 0x58
1704 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* Delegates to the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
1710
1711
1712/**
1713 * @opcode 0x59
1714 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* Delegates to the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
1720
1721
1722/**
1723 * @opcode 0x5a
1724 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* Delegates to the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
1730
1731
1732/**
1733 * @opcode 0x5b
1734 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* Delegates to the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
1740
1741
1742/**
1743 * @opcode 0x5c
1744 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this is POP r12, which the common worker handles normally. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    /* POP SP pops into a local first and stores afterwards, since the pop itself
       modifies SP and the store must win. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1791
1792
1793/**
1794 * @opcode 0x5d
1795 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* Delegates to the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1801
1802
1803/**
1804 * @opcode 0x5e
1805 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* Delegates to the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1811
1812
1813/**
1814 * @opcode 0x5f
1815 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* Delegates to the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1821
1822
1823/**
1824 * @opcode 0x60
1825 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186(); /* PUSHA was introduced with the 80186 */
    IEMOP_HLP_NO_64BIT(); /* 0x60 is not PUSHA in 64-bit mode */
    /* Operand size selects the 16-bit (PUSHA) or 32-bit (PUSHAD) C implementation. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1836
1837
1838/**
1839 * @opcode 0x61
1840 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* Outside 64-bit mode 0x61 is POPA/POPAD; in 64-bit mode it is the (unsupported) MVEX prefix. */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186(); /* POPA was introduced with the 80186 */
        IEMOP_HLP_NO_64BIT();
        /* Operand size selects the 16-bit (POPA) or 32-bit (POPAD) C implementation. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1857
1858
1859/**
1860 * @opcode 0x62
1861 * @opmnemonic bound
1862 * @op1 Gv_RO
1863 * @op2 Ma
1864 * @opmincpu 80186
1865 * @ophints harmless invalid_64
1866 * @optest op1=0 op2=0 ->
1867 * @optest op1=1 op2=0 -> value.xcpt=5
1868 * @optest o16 / op1=0xffff op2=0x0000fffe ->
1869 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
1870 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
1871 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
1872 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
1873 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
1874 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
1875 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
1876 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
1877 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
1878 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
1879 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
1880 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
1881 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
1882 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
1883 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
1884 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
1885 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
1886 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
1887 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
1888 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
1889 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
1890 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
1891 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
1892 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
1893 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
1894 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
1895 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
1896 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
1897 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
1898 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
1899 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
1900 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
1901 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
1902 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
1903 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
1904 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
1905 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
1906 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
1907 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
1908 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
1909 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
1910 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* Memory operand: this really is BOUND - read index from the reg
               field and the two bounds from consecutive memory words/dwords. */
            /** @todo testcase: check that there are two memory accesses involved.  Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t,    u16Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t,    u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t,    u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); /* upper bound at offset +2 */

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t,    u32Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t,    u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t,    u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); /* upper bound at offset +4 */

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
    {
        /* 64-bit mode: 0x62 can only be the EVEX prefix. */
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    IEMOP_MNEMONIC(evex, "evex");
    /* Consume the remaining two EVEX payload bytes, then bail - not implemented. */
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1998
1999
2000/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286(); /* ARPL requires protected mode, introduced with the 80286 */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory: read-modify-write mapping of the destination word. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2047
2048
2049/**
2050 * @opcode 0x63
2051 *
2052 * @note This is a weird one. It works like a regular move instruction if
2053 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2054 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            /* Fetch the 32-bit source and sign-extend it to 64 bits. */
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Fetch a 32-bit memory operand and sign-extend it to 64 bits. */
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* Without REX.W the instruction behaves like a plain mov (see @note above);
           that path is not implemented yet. */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2096
2097
2098/**
2099 * @opcode 0x64
2100 * @opmnemonic segfs
2101 * @opmincpu 80386
2102 * @opgroup og_prefixes
2103 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    /* FS segment-override prefix: record it and restart decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386(); /* FS was introduced with the 80386 */

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* dispatch the instruction following the prefix */
}
2115
2116
2117/**
2118 * @opcode 0x65
2119 * @opmnemonic seggs
2120 * @opmincpu 80386
2121 * @opgroup og_prefixes
2122 */
2123FNIEMOP_DEF(iemOp_seg_GS)
2124{
2125 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2126 IEMOP_HLP_MIN_386();
2127
2128 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2129 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2130
2131 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2132 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2133}
2134
2135
2136/**
2137 * @opcode 0x66
2138 * @opmnemonic opsize
2139 * @openc prefix
2140 * @opmincpu 80386
2141 * @ophints harmless
2142 * @opgroup og_prefixes
2143 */
2144FNIEMOP_DEF(iemOp_op_size)
2145{
2146 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2147 IEMOP_HLP_MIN_386();
2148
2149 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2150 iemRecalEffOpSize(pVCpu);
2151
2152 /* For the 4 entry opcode tables, the operand prefix doesn't not count
2153 when REPZ or REPNZ are present. */
2154 if (pVCpu->iem.s.idxPrefix == 0)
2155 pVCpu->iem.s.idxPrefix = 1;
2156
2157 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2158 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2159}
2160
2161
2162/**
2163 * @opcode 0x67
2164 * @opmnemonic addrsize
2165 * @openc prefix
2166 * @opmincpu 80386
2167 * @ophints harmless
2168 * @opgroup og_prefixes
2169 */
2170FNIEMOP_DEF(iemOp_addr_size)
2171{
2172 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2173 IEMOP_HLP_MIN_386();
2174
2175 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2176 switch (pVCpu->iem.s.enmDefAddrMode)
2177 {
2178 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2179 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2180 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2181 default: AssertFailed();
2182 }
2183
2184 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2185 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2186}
2187
2188
2189/**
2190 * @opcode 0x68
2191 */
2192FNIEMOP_DEF(iemOp_push_Iz)
2193{
2194 IEMOP_MNEMONIC(push_Iz, "push Iz");
2195 IEMOP_HLP_MIN_186();
2196 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2197 switch (pVCpu->iem.s.enmEffOpSize)
2198 {
2199 case IEMMODE_16BIT:
2200 {
2201 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2203 IEM_MC_BEGIN(0,0);
2204 IEM_MC_PUSH_U16(u16Imm);
2205 IEM_MC_ADVANCE_RIP_AND_FINISH();
2206 IEM_MC_END();
2207 return VINF_SUCCESS;
2208 }
2209
2210 case IEMMODE_32BIT:
2211 {
2212 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2214 IEM_MC_BEGIN(0,0);
2215 IEM_MC_PUSH_U32(u32Imm);
2216 IEM_MC_ADVANCE_RIP_AND_FINISH();
2217 IEM_MC_END();
2218 return VINF_SUCCESS;
2219 }
2220
2221 case IEMMODE_64BIT:
2222 {
2223 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2225 IEM_MC_BEGIN(0,0);
2226 IEM_MC_PUSH_U64(u64Imm);
2227 IEM_MC_ADVANCE_RIP_AND_FINISH();
2228 IEM_MC_END();
2229 return VINF_SUCCESS;
2230 }
2231
2232 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2233 }
2234}
2235
2236
2237/**
2238 * @opcode 0x69
2239 */
2240FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2241{
2242 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2243 IEMOP_HLP_MIN_186();
2244 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2245 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2246
2247 switch (pVCpu->iem.s.enmEffOpSize)
2248 {
2249 case IEMMODE_16BIT:
2250 {
2251 if (IEM_IS_MODRM_REG_MODE(bRm))
2252 {
2253 /* register operand */
2254 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2256
2257 IEM_MC_BEGIN(3, 1);
2258 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2259 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
2260 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2261 IEM_MC_LOCAL(uint16_t, u16Tmp);
2262
2263 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2264 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2265 IEM_MC_REF_EFLAGS(pEFlags);
2266 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
2267 pu16Dst, u16Src, pEFlags);
2268 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2269
2270 IEM_MC_ADVANCE_RIP_AND_FINISH();
2271 IEM_MC_END();
2272 }
2273 else
2274 {
2275 /* memory operand */
2276 IEM_MC_BEGIN(3, 2);
2277 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2278 IEM_MC_ARG(uint16_t, u16Src, 1);
2279 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2280 IEM_MC_LOCAL(uint16_t, u16Tmp);
2281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2282
2283 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2284 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2285 IEM_MC_ASSIGN(u16Src, u16Imm);
2286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2287 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2288 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2289 IEM_MC_REF_EFLAGS(pEFlags);
2290 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
2291 pu16Dst, u16Src, pEFlags);
2292 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2293
2294 IEM_MC_ADVANCE_RIP_AND_FINISH();
2295 IEM_MC_END();
2296 }
2297 return VINF_SUCCESS;
2298 }
2299
2300 case IEMMODE_32BIT:
2301 {
2302 if (IEM_IS_MODRM_REG_MODE(bRm))
2303 {
2304 /* register operand */
2305 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2307
2308 IEM_MC_BEGIN(3, 1);
2309 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2310 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
2311 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2312 IEM_MC_LOCAL(uint32_t, u32Tmp);
2313
2314 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2315 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2316 IEM_MC_REF_EFLAGS(pEFlags);
2317 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
2318 pu32Dst, u32Src, pEFlags);
2319 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2320
2321 IEM_MC_ADVANCE_RIP_AND_FINISH();
2322 IEM_MC_END();
2323 }
2324 else
2325 {
2326 /* memory operand */
2327 IEM_MC_BEGIN(3, 2);
2328 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2329 IEM_MC_ARG(uint32_t, u32Src, 1);
2330 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2331 IEM_MC_LOCAL(uint32_t, u32Tmp);
2332 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2333
2334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2335 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2336 IEM_MC_ASSIGN(u32Src, u32Imm);
2337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2338 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2339 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2340 IEM_MC_REF_EFLAGS(pEFlags);
2341 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
2342 pu32Dst, u32Src, pEFlags);
2343 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2344
2345 IEM_MC_ADVANCE_RIP_AND_FINISH();
2346 IEM_MC_END();
2347 }
2348 return VINF_SUCCESS;
2349 }
2350
2351 case IEMMODE_64BIT:
2352 {
2353 if (IEM_IS_MODRM_REG_MODE(bRm))
2354 {
2355 /* register operand */
2356 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2358
2359 IEM_MC_BEGIN(3, 1);
2360 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2361 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
2362 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2363 IEM_MC_LOCAL(uint64_t, u64Tmp);
2364
2365 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2366 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2367 IEM_MC_REF_EFLAGS(pEFlags);
2368 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
2369 pu64Dst, u64Src, pEFlags);
2370 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2371
2372 IEM_MC_ADVANCE_RIP_AND_FINISH();
2373 IEM_MC_END();
2374 }
2375 else
2376 {
2377 /* memory operand */
2378 IEM_MC_BEGIN(3, 2);
2379 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2380 IEM_MC_ARG(uint64_t, u64Src, 1);
2381 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2382 IEM_MC_LOCAL(uint64_t, u64Tmp);
2383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2384
2385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2386 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2387 IEM_MC_ASSIGN(u64Src, u64Imm);
2388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2389 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2390 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2391 IEM_MC_REF_EFLAGS(pEFlags);
2392 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
2393 pu64Dst, u64Src, pEFlags);
2394 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2395
2396 IEM_MC_ADVANCE_RIP_AND_FINISH();
2397 IEM_MC_END();
2398 }
2399 return VINF_SUCCESS;
2400 }
2401
2402 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2403 }
2404}
2405
2406
2407/**
2408 * @opcode 0x6a
2409 */
2410FNIEMOP_DEF(iemOp_push_Ib)
2411{
2412 IEMOP_MNEMONIC(push_Ib, "push Ib");
2413 IEMOP_HLP_MIN_186();
2414 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2416 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2417
2418 IEM_MC_BEGIN(0,0);
2419 switch (pVCpu->iem.s.enmEffOpSize)
2420 {
2421 case IEMMODE_16BIT:
2422 IEM_MC_PUSH_U16(i8Imm);
2423 break;
2424 case IEMMODE_32BIT:
2425 IEM_MC_PUSH_U32(i8Imm);
2426 break;
2427 case IEMMODE_64BIT:
2428 IEM_MC_PUSH_U64(i8Imm);
2429 break;
2430 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2431 }
2432 IEM_MC_ADVANCE_RIP_AND_FINISH();
2433 IEM_MC_END();
2434}
2435
2436
2437/**
2438 * @opcode 0x6b
2439 */
2440FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
2441{
2442 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
2443 IEMOP_HLP_MIN_186();
2444 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2445 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2446
2447 switch (pVCpu->iem.s.enmEffOpSize)
2448 {
2449 case IEMMODE_16BIT:
2450 if (IEM_IS_MODRM_REG_MODE(bRm))
2451 {
2452 /* register operand */
2453 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2455
2456 IEM_MC_BEGIN(3, 1);
2457 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2458 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
2459 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2460 IEM_MC_LOCAL(uint16_t, u16Tmp);
2461
2462 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2463 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2464 IEM_MC_REF_EFLAGS(pEFlags);
2465 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
2466 pu16Dst, u16Src, pEFlags);
2467 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2468
2469 IEM_MC_ADVANCE_RIP_AND_FINISH();
2470 IEM_MC_END();
2471 }
2472 else
2473 {
2474 /* memory operand */
2475 IEM_MC_BEGIN(3, 2);
2476 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2477 IEM_MC_ARG(uint16_t, u16Src, 1);
2478 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2479 IEM_MC_LOCAL(uint16_t, u16Tmp);
2480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2481
2482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2483 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
2484 IEM_MC_ASSIGN(u16Src, u16Imm);
2485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2486 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2487 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2488 IEM_MC_REF_EFLAGS(pEFlags);
2489 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
2490 pu16Dst, u16Src, pEFlags);
2491 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2492
2493 IEM_MC_ADVANCE_RIP_AND_FINISH();
2494 IEM_MC_END();
2495 }
2496 return VINF_SUCCESS;
2497
2498 case IEMMODE_32BIT:
2499 if (IEM_IS_MODRM_REG_MODE(bRm))
2500 {
2501 /* register operand */
2502 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2504
2505 IEM_MC_BEGIN(3, 1);
2506 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2507 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
2508 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2509 IEM_MC_LOCAL(uint32_t, u32Tmp);
2510
2511 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2512 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2513 IEM_MC_REF_EFLAGS(pEFlags);
2514 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
2515 pu32Dst, u32Src, pEFlags);
2516 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2517
2518 IEM_MC_ADVANCE_RIP_AND_FINISH();
2519 IEM_MC_END();
2520 }
2521 else
2522 {
2523 /* memory operand */
2524 IEM_MC_BEGIN(3, 2);
2525 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2526 IEM_MC_ARG(uint32_t, u32Src, 1);
2527 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2528 IEM_MC_LOCAL(uint32_t, u32Tmp);
2529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2530
2531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2532 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
2533 IEM_MC_ASSIGN(u32Src, u32Imm);
2534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2535 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2536 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2537 IEM_MC_REF_EFLAGS(pEFlags);
2538 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
2539 pu32Dst, u32Src, pEFlags);
2540 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2541
2542 IEM_MC_ADVANCE_RIP_AND_FINISH();
2543 IEM_MC_END();
2544 }
2545 return VINF_SUCCESS;
2546
2547 case IEMMODE_64BIT:
2548 if (IEM_IS_MODRM_REG_MODE(bRm))
2549 {
2550 /* register operand */
2551 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2553
2554 IEM_MC_BEGIN(3, 1);
2555 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2556 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
2557 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2558 IEM_MC_LOCAL(uint64_t, u64Tmp);
2559
2560 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2561 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2562 IEM_MC_REF_EFLAGS(pEFlags);
2563 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
2564 pu64Dst, u64Src, pEFlags);
2565 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2566
2567 IEM_MC_ADVANCE_RIP_AND_FINISH();
2568 IEM_MC_END();
2569 }
2570 else
2571 {
2572 /* memory operand */
2573 IEM_MC_BEGIN(3, 2);
2574 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2575 IEM_MC_ARG(uint64_t, u64Src, 1);
2576 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2577 IEM_MC_LOCAL(uint64_t, u64Tmp);
2578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2579
2580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2581 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
2582 IEM_MC_ASSIGN(u64Src, u64Imm);
2583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2584 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2585 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2586 IEM_MC_REF_EFLAGS(pEFlags);
2587 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
2588 pu64Dst, u64Src, pEFlags);
2589 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2590
2591 IEM_MC_ADVANCE_RIP_AND_FINISH();
2592 IEM_MC_END();
2593 }
2594 return VINF_SUCCESS;
2595
2596 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2597 }
2598 AssertFailedReturn(VERR_IEM_IPE_8);
2599}
2600
2601
2602/**
2603 * @opcode 0x6c
2604 */
2605FNIEMOP_DEF(iemOp_insb_Yb_DX)
2606{
2607 IEMOP_HLP_MIN_186();
2608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2609 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2610 {
2611 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
2612 switch (pVCpu->iem.s.enmEffAddrMode)
2613 {
2614 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
2615 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
2616 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
2617 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2618 }
2619 }
2620 else
2621 {
2622 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
2623 switch (pVCpu->iem.s.enmEffAddrMode)
2624 {
2625 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
2626 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
2627 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
2628 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2629 }
2630 }
2631}
2632
2633
2634/**
2635 * @opcode 0x6d
2636 */
2637FNIEMOP_DEF(iemOp_inswd_Yv_DX)
2638{
2639 IEMOP_HLP_MIN_186();
2640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2641 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2642 {
2643 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
2644 switch (pVCpu->iem.s.enmEffOpSize)
2645 {
2646 case IEMMODE_16BIT:
2647 switch (pVCpu->iem.s.enmEffAddrMode)
2648 {
2649 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
2650 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
2651 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
2652 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2653 }
2654 break;
2655 case IEMMODE_64BIT:
2656 case IEMMODE_32BIT:
2657 switch (pVCpu->iem.s.enmEffAddrMode)
2658 {
2659 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
2660 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
2661 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
2662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2663 }
2664 break;
2665 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2666 }
2667 }
2668 else
2669 {
2670 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
2671 switch (pVCpu->iem.s.enmEffOpSize)
2672 {
2673 case IEMMODE_16BIT:
2674 switch (pVCpu->iem.s.enmEffAddrMode)
2675 {
2676 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
2677 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
2678 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
2679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2680 }
2681 break;
2682 case IEMMODE_64BIT:
2683 case IEMMODE_32BIT:
2684 switch (pVCpu->iem.s.enmEffAddrMode)
2685 {
2686 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
2687 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
2688 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
2689 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2690 }
2691 break;
2692 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2693 }
2694 }
2695}
2696
2697
2698/**
2699 * @opcode 0x6e
2700 */
2701FNIEMOP_DEF(iemOp_outsb_Yb_DX)
2702{
2703 IEMOP_HLP_MIN_186();
2704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2705 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2706 {
2707 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
2708 switch (pVCpu->iem.s.enmEffAddrMode)
2709 {
2710 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
2711 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
2712 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
2713 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2714 }
2715 }
2716 else
2717 {
2718 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
2719 switch (pVCpu->iem.s.enmEffAddrMode)
2720 {
2721 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
2722 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
2723 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
2724 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2725 }
2726 }
2727}
2728
2729
2730/**
2731 * @opcode 0x6f
2732 */
2733FNIEMOP_DEF(iemOp_outswd_Yv_DX)
2734{
2735 IEMOP_HLP_MIN_186();
2736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2737 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2738 {
2739 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
2740 switch (pVCpu->iem.s.enmEffOpSize)
2741 {
2742 case IEMMODE_16BIT:
2743 switch (pVCpu->iem.s.enmEffAddrMode)
2744 {
2745 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
2746 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
2747 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
2748 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2749 }
2750 break;
2751 case IEMMODE_64BIT:
2752 case IEMMODE_32BIT:
2753 switch (pVCpu->iem.s.enmEffAddrMode)
2754 {
2755 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
2756 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
2757 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
2758 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2759 }
2760 break;
2761 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2762 }
2763 }
2764 else
2765 {
2766 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
2767 switch (pVCpu->iem.s.enmEffOpSize)
2768 {
2769 case IEMMODE_16BIT:
2770 switch (pVCpu->iem.s.enmEffAddrMode)
2771 {
2772 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
2773 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
2774 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
2775 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2776 }
2777 break;
2778 case IEMMODE_64BIT:
2779 case IEMMODE_32BIT:
2780 switch (pVCpu->iem.s.enmEffAddrMode)
2781 {
2782 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
2783 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
2784 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
2785 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2786 }
2787 break;
2788 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2789 }
2790 }
2791}
2792
2793
2794/**
2795 * @opcode 0x70
2796 */
2797FNIEMOP_DEF(iemOp_jo_Jb)
2798{
2799 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
2800 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2802 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2803
2804 IEM_MC_BEGIN(0, 0);
2805 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
2806 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2807 } IEM_MC_ELSE() {
2808 IEM_MC_ADVANCE_RIP_AND_FINISH();
2809 } IEM_MC_ENDIF();
2810 IEM_MC_END();
2811}
2812
2813
2814/**
2815 * @opcode 0x71
2816 */
2817FNIEMOP_DEF(iemOp_jno_Jb)
2818{
2819 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
2820 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2822 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2823
2824 IEM_MC_BEGIN(0, 0);
2825 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
2826 IEM_MC_ADVANCE_RIP_AND_FINISH();
2827 } IEM_MC_ELSE() {
2828 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2829 } IEM_MC_ENDIF();
2830 IEM_MC_END();
2831}
2832
2833/**
2834 * @opcode 0x72
2835 */
2836FNIEMOP_DEF(iemOp_jc_Jb)
2837{
2838 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
2839 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2841 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2842
2843 IEM_MC_BEGIN(0, 0);
2844 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
2845 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2846 } IEM_MC_ELSE() {
2847 IEM_MC_ADVANCE_RIP_AND_FINISH();
2848 } IEM_MC_ENDIF();
2849 IEM_MC_END();
2850}
2851
2852
2853/**
2854 * @opcode 0x73
2855 */
2856FNIEMOP_DEF(iemOp_jnc_Jb)
2857{
2858 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
2859 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2861 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2862
2863 IEM_MC_BEGIN(0, 0);
2864 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
2865 IEM_MC_ADVANCE_RIP_AND_FINISH();
2866 } IEM_MC_ELSE() {
2867 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2868 } IEM_MC_ENDIF();
2869 IEM_MC_END();
2870}
2871
2872
2873/**
2874 * @opcode 0x74
2875 */
2876FNIEMOP_DEF(iemOp_je_Jb)
2877{
2878 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
2879 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2881 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2882
2883 IEM_MC_BEGIN(0, 0);
2884 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2885 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2886 } IEM_MC_ELSE() {
2887 IEM_MC_ADVANCE_RIP_AND_FINISH();
2888 } IEM_MC_ENDIF();
2889 IEM_MC_END();
2890}
2891
2892
2893/**
2894 * @opcode 0x75
2895 */
2896FNIEMOP_DEF(iemOp_jne_Jb)
2897{
2898 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
2899 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2901 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2902
2903 IEM_MC_BEGIN(0, 0);
2904 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2905 IEM_MC_ADVANCE_RIP_AND_FINISH();
2906 } IEM_MC_ELSE() {
2907 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2908 } IEM_MC_ENDIF();
2909 IEM_MC_END();
2910}
2911
2912
2913/**
2914 * @opcode 0x76
2915 */
2916FNIEMOP_DEF(iemOp_jbe_Jb)
2917{
2918 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
2919 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2921 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2922
2923 IEM_MC_BEGIN(0, 0);
2924 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2925 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2926 } IEM_MC_ELSE() {
2927 IEM_MC_ADVANCE_RIP_AND_FINISH();
2928 } IEM_MC_ENDIF();
2929 IEM_MC_END();
2930}
2931
2932
2933/**
2934 * @opcode 0x77
2935 */
2936FNIEMOP_DEF(iemOp_jnbe_Jb)
2937{
2938 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
2939 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2941 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2942
2943 IEM_MC_BEGIN(0, 0);
2944 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2945 IEM_MC_ADVANCE_RIP_AND_FINISH();
2946 } IEM_MC_ELSE() {
2947 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2948 } IEM_MC_ENDIF();
2949 IEM_MC_END();
2950}
2951
2952
2953/**
2954 * @opcode 0x78
2955 */
2956FNIEMOP_DEF(iemOp_js_Jb)
2957{
2958 IEMOP_MNEMONIC(js_Jb, "js Jb");
2959 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2961 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2962
2963 IEM_MC_BEGIN(0, 0);
2964 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2965 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2966 } IEM_MC_ELSE() {
2967 IEM_MC_ADVANCE_RIP_AND_FINISH();
2968 } IEM_MC_ENDIF();
2969 IEM_MC_END();
2970}
2971
2972
2973/**
2974 * @opcode 0x79
2975 */
2976FNIEMOP_DEF(iemOp_jns_Jb)
2977{
2978 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
2979 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2981 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2982
2983 IEM_MC_BEGIN(0, 0);
2984 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2985 IEM_MC_ADVANCE_RIP_AND_FINISH();
2986 } IEM_MC_ELSE() {
2987 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2988 } IEM_MC_ENDIF();
2989 IEM_MC_END();
2990}
2991
2992
2993/**
2994 * @opcode 0x7a
2995 */
2996FNIEMOP_DEF(iemOp_jp_Jb)
2997{
2998 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
2999 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3001 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3002
3003 IEM_MC_BEGIN(0, 0);
3004 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3005 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3006 } IEM_MC_ELSE() {
3007 IEM_MC_ADVANCE_RIP_AND_FINISH();
3008 } IEM_MC_ENDIF();
3009 IEM_MC_END();
3010}
3011
3012
3013/**
3014 * @opcode 0x7b
3015 */
3016FNIEMOP_DEF(iemOp_jnp_Jb)
3017{
3018 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3019 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3021 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3022
3023 IEM_MC_BEGIN(0, 0);
3024 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3025 IEM_MC_ADVANCE_RIP_AND_FINISH();
3026 } IEM_MC_ELSE() {
3027 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3028 } IEM_MC_ENDIF();
3029 IEM_MC_END();
3030}
3031
3032
3033/**
3034 * @opcode 0x7c
3035 */
3036FNIEMOP_DEF(iemOp_jl_Jb)
3037{
3038 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3039 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3041 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3042
3043 IEM_MC_BEGIN(0, 0);
3044 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3045 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3046 } IEM_MC_ELSE() {
3047 IEM_MC_ADVANCE_RIP_AND_FINISH();
3048 } IEM_MC_ENDIF();
3049 IEM_MC_END();
3050}
3051
3052
3053/**
3054 * @opcode 0x7d
3055 */
3056FNIEMOP_DEF(iemOp_jnl_Jb)
3057{
3058 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3059 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3061 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3062
3063 IEM_MC_BEGIN(0, 0);
3064 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3065 IEM_MC_ADVANCE_RIP_AND_FINISH();
3066 } IEM_MC_ELSE() {
3067 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3068 } IEM_MC_ENDIF();
3069 IEM_MC_END();
3070}
3071
3072
3073/**
3074 * @opcode 0x7e
3075 */
3076FNIEMOP_DEF(iemOp_jle_Jb)
3077{
3078 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3079 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3081 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3082
3083 IEM_MC_BEGIN(0, 0);
3084 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3085 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3086 } IEM_MC_ELSE() {
3087 IEM_MC_ADVANCE_RIP_AND_FINISH();
3088 } IEM_MC_ENDIF();
3089 IEM_MC_END();
3090}
3091
3092
3093/**
3094 * @opcode 0x7f
3095 */
3096FNIEMOP_DEF(iemOp_jnle_Jb)
3097{
3098 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3099 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3101 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3102
3103 IEM_MC_BEGIN(0, 0);
3104 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3105 IEM_MC_ADVANCE_RIP_AND_FINISH();
3106 } IEM_MC_ELSE() {
3107 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3108 } IEM_MC_ENDIF();
3109 IEM_MC_END();
3110}
3111
3112
3113/**
3114 * @opcode 0x80
3115 */
3116FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
3117{
3118 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3119 switch (IEM_GET_MODRM_REG_8(bRm))
3120 {
3121 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
3122 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
3123 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
3124 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
3125 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
3126 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
3127 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
3128 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
3129 }
3130 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];
3131
3132 if (IEM_IS_MODRM_REG_MODE(bRm))
3133 {
3134 /* register target */
3135 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3137 IEM_MC_BEGIN(3, 0);
3138 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
3139 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
3140 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3141
3142 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3143 IEM_MC_REF_EFLAGS(pEFlags);
3144 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
3145
3146 IEM_MC_ADVANCE_RIP_AND_FINISH();
3147 IEM_MC_END();
3148 }
3149 else
3150 {
3151 /* memory target */
3152 uint32_t fAccess;
3153 if (pImpl->pfnLockedU8)
3154 fAccess = IEM_ACCESS_DATA_RW;
3155 else /* CMP */
3156 fAccess = IEM_ACCESS_DATA_R;
3157 IEM_MC_BEGIN(3, 2);
3158 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
3159 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3161
3162 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3163 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3164 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
3165 if (pImpl->pfnLockedU8)
3166 IEMOP_HLP_DONE_DECODING();
3167 else
3168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3169
3170 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3171 IEM_MC_FETCH_EFLAGS(EFlags);
3172 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3173 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
3174 else
3175 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
3176
3177 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
3178 IEM_MC_COMMIT_EFLAGS(EFlags);
3179 IEM_MC_ADVANCE_RIP_AND_FINISH();
3180 IEM_MC_END();
3181 }
3182}
3183
3184
/**
 * @opcode 0x81
 *
 * Group 1 word/dword/qword ops with full-size immediate:
 * add/or/adc/sbb/and/sub/xor/cmp Ev,Iz.  One case per effective operand
 * size; each case has a register-target and a memory-target path.
 * In 64-bit mode the immediate is 32 bits sign-extended to 64.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                /* CMP lacks a locked variant (read-only destination). */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The '2' is the number of immediate bytes still to come. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit writes zero the high dword of the 64-bit register,
                   except for CMP which does not write its destination. */
                if (pImpl != &g_iemAImpl_cmp) /* TEST won't get here, no need to check for it. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 immediate bytes follow the ModR/M bytes here. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register target */
                /* Iz in 64-bit mode: 32-bit immediate sign-extended to 64. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Still 4 immediate bytes (sign-extended) in 64-bit mode. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3376
3377
/**
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup og_groups
 *
 * Alias of opcode 0x80 (Grp1 Eb,Ib) that is only valid outside 64-bit mode;
 * in 64-bit mode it raises \#UD.  Forwards to the 0x80 handler.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
3388
3389
/**
 * @opcode 0x83
 *
 * Group 1 word/dword/qword ops with a sign-extended byte immediate:
 * add/or/adc/sbb/and/sub/xor/cmp Ev,Ib.  The single immediate byte is
 * cast via (int8_t) so it sign-extends to the effective operand size.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                /* (int8_t) cast sign-extends the immediate to 16 bits. */
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit writes clear the high dword; CMP doesn't write. */
                if (pImpl != &g_iemAImpl_cmp) /* TEST won't get here, no need to check for it. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* CMP maps read-only; all other group-1 ops are read-modify-write.
           The U16 function pointer stands in for all sizes here. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG(uint16_t,        u16Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* One immediate byte still to be fetched. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG(uint32_t,        u32Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG(uint64_t,        u64Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3578
3579
/**
 * @opcode 0x84
 *
 * TEST Eb,Gb - AND without storing the result, only EFLAGS are updated.
 * Delegates to the common rm,r8 binary-operator decoder with the TEST
 * implementation table.  AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
3589
3590
/**
 * @opcode 0x85
 *
 * TEST Ev,Gv - word/dword/qword variant of TEST; flags only, no result
 * written.  Delegates to the common rm,rv binary-operator decoder.
 * AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
3600
3601
/**
 * @opcode 0x86
 *
 * XCHG Eb,Gb - exchange a byte register with a byte register or memory.
 * The memory form is locked by default (implicit LOCK semantics) unless
 * the VM is configured to disregard LOCK (fDisregardLock).
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Fetch both, then cross-store. */
        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,  pu8Mem, 0);
        IEM_MC_ARG(uint8_t *,  pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* XCHG with memory is implicitly locked; use the unlocked helper
           only when the VM disregards LOCK. */
        if (!pVCpu->iem.s.fDisregardLock)
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
        else
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3653
3654
/**
 * @opcode 0x87
 *
 * XCHG Ev,Gv - exchange a word/dword/qword register with a register or
 * memory operand.  The memory form is implicitly locked unless the VM is
 * configured to disregard LOCK (fDisregardLock).
 *
 * NOTE(review): these cases end with 'return VINF_SUCCESS;' after
 * IEM_MC_END() while sibling handlers (e.g. mov Ev,Gv) use 'break' -
 * presumably dead code after the *_AND_FINISH conversion; confirm against
 * the IEM_MC_END/IEM_MC_ADVANCE_RIP_AND_FINISH macro definitions.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                /* Fetch both operands, then cross-store them. */
                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *,  pu16Mem, 0);
                IEM_MC_ARG(uint16_t *,  pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                /* Memory XCHG is implicitly locked. */
                if (!pVCpu->iem.s.fDisregardLock)
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *,  pu32Mem, 0);
                IEM_MC_ARG(uint32_t *,  pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!pVCpu->iem.s.fDisregardLock)
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* 32-bit GPR write: zero the high dword of the register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *,  pu64Mem, 0);
                IEM_MC_ARG(uint64_t *,  pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!pVCpu->iem.s.fDisregardLock)
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3787
3788
/**
 * @opcode 0x88
 *
 * MOV Eb,Gb - store a byte register into a byte register or memory.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3828
3829
/**
 * @opcode 0x89
 *
 * MOV Ev,Gv - store a word/dword/qword register into a register or memory,
 * one case per effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3924
3925
/**
 * @opcode 0x8a
 *
 * MOV Gb,Eb - load a byte register from a byte register or memory.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3964
3965
/**
 * @opcode 0x8b
 *
 * MOV Gv,Ev - load a word/dword/qword register from a register or memory,
 * one case per effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4060
4061
/**
 * opcode 0x63
 * @todo Table fixme
 *
 * Mode-dependent dispatcher for opcode 0x63: ARPL Ew,Gw outside 64-bit
 * mode; in 64-bit mode it is MOVSXD (or plain MOV Gv,Ev when the effective
 * operand size is not 64-bit, since there is no 32->32 sign extension).
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
4074
4075
/**
 * @opcode 0x8c
 *
 * MOV Ev,Sw - store a segment register into a general-purpose register or
 * memory.  The memory form always stores 16 bits regardless of operand
 * size; the register form zero-extends to the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    /* Note: IEM_GET_MODRM_REG_8 is the non-REX reg field, i.e. REX.R ignored. */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4152
4153
4154
4155
/**
 * @opcode 0x8d
 *
 * LEA Gv,M - load the effective address of the memory operand into a
 * general-purpose register.  The register form of ModR/M is invalid and
 * raises \#UD.  The 16/32-bit cases truncate the address to the operand
 * size before storing.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Truncate the effective address to 16 bits. */
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4205
4206
4207/**
4208 * @opcode 0x8e
4209 */
4210FNIEMOP_DEF(iemOp_mov_Sw_Ev)
4211{
4212 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
4213
4214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4215
4216 /*
4217 * The practical operand size is 16-bit.
4218 */
4219#if 0 /* not necessary */
4220 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
4221#endif
4222
4223 /*
4224 * Check that the destination register exists and can be used with this
4225 * instruction. The REX.R prefix is ignored.
4226 */
4227 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
4228 if ( iSegReg == X86_SREG_CS
4229 || iSegReg > X86_SREG_GS)
4230 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
4231
4232 /*
4233 * If rm is denoting a register, no more instruction bytes.
4234 */
4235 if (IEM_IS_MODRM_REG_MODE(bRm))
4236 {
4237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4238 IEM_MC_BEGIN(2, 0);
4239 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
4240 IEM_MC_ARG(uint16_t, u16Value, 1);
4241 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4242 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
4243 IEM_MC_END();
4244 }
4245 else
4246 {
4247 /*
4248 * We're loading the register from memory. The access is word sized
4249 * regardless of operand size prefixes.
4250 */
4251 IEM_MC_BEGIN(2, 1);
4252 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
4253 IEM_MC_ARG(uint16_t, u16Value, 1);
4254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4255 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4257 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4258 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
4259 IEM_MC_END();
4260 }
4261 return VINF_SUCCESS;
4262}
4263
4264
/** Opcode 0x8f /0.
 *
 * POP Ev - pops a value off the stack into a register or memory operand.
 * The memory form is implemented directly (interpreter-only) because the
 * stack pointer must be incremented *before* the effective address of the
 * destination is calculated.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP.  The last argument is the
       byte offset added to rSP (the operand size) before the calculation. */
/** @todo testcase */
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl.  A temporary rSP copy is
       used so nothing is committed if the store to memory faults. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit the new stack pointer only now that both pop and store succeeded. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2; /* MC-check builds cannot exercise this direct implementation. */
#endif
}
4358
4359
4360/**
4361 * @opcode 0x8f
4362 */
4363FNIEMOP_DEF(iemOp_Grp1A__xop)
4364{
4365 /*
4366 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
4367 * three byte VEX prefix, except that the mmmmm field cannot have the values
4368 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
4369 */
4370 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4371 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
4372 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
4373
4374 IEMOP_MNEMONIC(xop, "xop");
4375 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
4376 {
4377 /** @todo Test when exctly the XOP conformance checks kick in during
4378 * instruction decoding and fetching (using \#PF). */
4379 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
4380 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
4381 if ( ( pVCpu->iem.s.fPrefixes
4382 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
4383 == 0)
4384 {
4385 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
4386 if ((bXop2 & 0x80 /* XOP.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
4387 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
4388 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
4389 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
4390 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
4391 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
4392 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
4393 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
4394
4395 /** @todo XOP: Just use new tables and decoders. */
4396 switch (bRm & 0x1f)
4397 {
4398 case 8: /* xop opcode map 8. */
4399 IEMOP_BITCH_ABOUT_STUB();
4400 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4401
4402 case 9: /* xop opcode map 9. */
4403 IEMOP_BITCH_ABOUT_STUB();
4404 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4405
4406 case 10: /* xop opcode map 10. */
4407 IEMOP_BITCH_ABOUT_STUB();
4408 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4409
4410 default:
4411 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
4412 return IEMOP_RAISE_INVALID_OPCODE();
4413 }
4414 }
4415 else
4416 Log(("XOP: Invalid prefix mix!\n"));
4417 }
4418 else
4419 Log(("XOP: XOP support disabled!\n"));
4420 return IEMOP_RAISE_INVALID_OPCODE();
4421}
4422
4423
4424/**
4425 * Common 'xchg reg,rAX' helper.
4426 */
4427FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
4428{
4429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4430
4431 iReg |= pVCpu->iem.s.uRexB;
4432 switch (pVCpu->iem.s.enmEffOpSize)
4433 {
4434 case IEMMODE_16BIT:
4435 IEM_MC_BEGIN(0, 2);
4436 IEM_MC_LOCAL(uint16_t, u16Tmp1);
4437 IEM_MC_LOCAL(uint16_t, u16Tmp2);
4438 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
4439 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
4440 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
4441 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
4442 IEM_MC_ADVANCE_RIP_AND_FINISH();
4443 IEM_MC_END();
4444 break;
4445
4446 case IEMMODE_32BIT:
4447 IEM_MC_BEGIN(0, 2);
4448 IEM_MC_LOCAL(uint32_t, u32Tmp1);
4449 IEM_MC_LOCAL(uint32_t, u32Tmp2);
4450 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
4451 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
4452 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
4453 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
4454 IEM_MC_ADVANCE_RIP_AND_FINISH();
4455 IEM_MC_END();
4456 break;
4457
4458 case IEMMODE_64BIT:
4459 IEM_MC_BEGIN(0, 2);
4460 IEM_MC_LOCAL(uint64_t, u64Tmp1);
4461 IEM_MC_LOCAL(uint64_t, u64Tmp2);
4462 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
4463 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
4464 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
4465 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
4466 IEM_MC_ADVANCE_RIP_AND_FINISH();
4467 IEM_MC_END();
4468 break;
4469
4470 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4471 }
4472}
4473
4474
4475/**
4476 * @opcode 0x90
4477 */
4478FNIEMOP_DEF(iemOp_nop)
4479{
4480 /* R8/R8D and RAX/EAX can be exchanged. */
4481 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4482 {
4483 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4484 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4485 }
4486
4487 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4488 {
4489 IEMOP_MNEMONIC(pause, "pause");
4490#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4491 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVmx)
4492 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmx_pause);
4493#endif
4494#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
4495 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSvm)
4496 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_svm_pause);
4497#endif
4498 }
4499 else
4500 IEMOP_MNEMONIC(nop, "nop");
4501 IEM_MC_BEGIN(0, 0);
4502 IEM_MC_ADVANCE_RIP_AND_FINISH();
4503 IEM_MC_END();
4504}
4505
4506
4507/**
4508 * @opcode 0x91
4509 */
4510FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
4511{
4512 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
4513 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
4514}
4515
4516
4517/**
4518 * @opcode 0x92
4519 */
4520FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
4521{
4522 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
4523 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
4524}
4525
4526
4527/**
4528 * @opcode 0x93
4529 */
4530FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
4531{
4532 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
4533 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
4534}
4535
4536
4537/**
4538 * @opcode 0x94
4539 */
4540FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4541{
4542 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4543 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4544}
4545
4546
4547/**
4548 * @opcode 0x95
4549 */
4550FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
4551{
4552 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
4553 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
4554}
4555
4556
4557/**
4558 * @opcode 0x96
4559 */
4560FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
4561{
4562 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
4563 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
4564}
4565
4566
4567/**
4568 * @opcode 0x97
4569 */
4570FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
4571{
4572 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
4573 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
4574}
4575
4576
4577/**
4578 * @opcode 0x98
4579 */
4580FNIEMOP_DEF(iemOp_cbw)
4581{
4582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4583 switch (pVCpu->iem.s.enmEffOpSize)
4584 {
4585 case IEMMODE_16BIT:
4586 IEMOP_MNEMONIC(cbw, "cbw");
4587 IEM_MC_BEGIN(0, 1);
4588 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
4589 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
4590 } IEM_MC_ELSE() {
4591 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
4592 } IEM_MC_ENDIF();
4593 IEM_MC_ADVANCE_RIP_AND_FINISH();
4594 IEM_MC_END();
4595 break;
4596
4597 case IEMMODE_32BIT:
4598 IEMOP_MNEMONIC(cwde, "cwde");
4599 IEM_MC_BEGIN(0, 1);
4600 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4601 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
4602 } IEM_MC_ELSE() {
4603 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
4604 } IEM_MC_ENDIF();
4605 IEM_MC_ADVANCE_RIP_AND_FINISH();
4606 IEM_MC_END();
4607 break;
4608
4609 case IEMMODE_64BIT:
4610 IEMOP_MNEMONIC(cdqe, "cdqe");
4611 IEM_MC_BEGIN(0, 1);
4612 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4613 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
4614 } IEM_MC_ELSE() {
4615 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
4616 } IEM_MC_ENDIF();
4617 IEM_MC_ADVANCE_RIP_AND_FINISH();
4618 IEM_MC_END();
4619 break;
4620
4621 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4622 }
4623}
4624
4625
4626/**
4627 * @opcode 0x99
4628 */
4629FNIEMOP_DEF(iemOp_cwd)
4630{
4631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4632 switch (pVCpu->iem.s.enmEffOpSize)
4633 {
4634 case IEMMODE_16BIT:
4635 IEMOP_MNEMONIC(cwd, "cwd");
4636 IEM_MC_BEGIN(0, 1);
4637 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4638 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
4639 } IEM_MC_ELSE() {
4640 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
4641 } IEM_MC_ENDIF();
4642 IEM_MC_ADVANCE_RIP_AND_FINISH();
4643 IEM_MC_END();
4644 break;
4645
4646 case IEMMODE_32BIT:
4647 IEMOP_MNEMONIC(cdq, "cdq");
4648 IEM_MC_BEGIN(0, 1);
4649 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4650 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
4651 } IEM_MC_ELSE() {
4652 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
4653 } IEM_MC_ENDIF();
4654 IEM_MC_ADVANCE_RIP_AND_FINISH();
4655 IEM_MC_END();
4656 break;
4657
4658 case IEMMODE_64BIT:
4659 IEMOP_MNEMONIC(cqo, "cqo");
4660 IEM_MC_BEGIN(0, 1);
4661 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
4662 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
4663 } IEM_MC_ELSE() {
4664 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
4665 } IEM_MC_ENDIF();
4666 IEM_MC_ADVANCE_RIP_AND_FINISH();
4667 IEM_MC_END();
4668 break;
4669
4670 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4671 }
4672}
4673
4674
4675/**
4676 * @opcode 0x9a
4677 */
4678FNIEMOP_DEF(iemOp_call_Ap)
4679{
4680 IEMOP_MNEMONIC(call_Ap, "call Ap");
4681 IEMOP_HLP_NO_64BIT();
4682
4683 /* Decode the far pointer address and pass it on to the far call C implementation. */
4684 uint32_t offSeg;
4685 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
4686 IEM_OPCODE_GET_NEXT_U32(&offSeg);
4687 else
4688 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
4689 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
4690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4691 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
4692}
4693
4694
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending FPU exceptions (and CR0.MP/TS conditions) and then
 * behaves like a nop.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();     /* #NM if CR0.MP+TS conditions apply */
    IEM_MC_MAYBE_RAISE_FPU_XCPT();                      /* #MF if an FPU exception is pending */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
4707
4708
4709/**
4710 * @opcode 0x9c
4711 */
4712FNIEMOP_DEF(iemOp_pushf_Fv)
4713{
4714 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
4715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4716 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4717 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
4718}
4719
4720
4721/**
4722 * @opcode 0x9d
4723 */
4724FNIEMOP_DEF(iemOp_popf_Fv)
4725{
4726 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
4727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4728 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4729 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
4730}
4731
4732
4733/**
4734 * @opcode 0x9e
4735 */
4736FNIEMOP_DEF(iemOp_sahf)
4737{
4738 IEMOP_MNEMONIC(sahf, "sahf");
4739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4740 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4741 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4742 return IEMOP_RAISE_INVALID_OPCODE();
4743 IEM_MC_BEGIN(0, 2);
4744 IEM_MC_LOCAL(uint32_t, u32Flags);
4745 IEM_MC_LOCAL(uint32_t, EFlags);
4746 IEM_MC_FETCH_EFLAGS(EFlags);
4747 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
4748 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
4749 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
4750 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
4751 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
4752 IEM_MC_COMMIT_EFLAGS(EFlags);
4753 IEM_MC_ADVANCE_RIP_AND_FINISH();
4754 IEM_MC_END();
4755}
4756
4757
4758/**
4759 * @opcode 0x9f
4760 */
4761FNIEMOP_DEF(iemOp_lahf)
4762{
4763 IEMOP_MNEMONIC(lahf, "lahf");
4764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4765 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4766 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4767 return IEMOP_RAISE_INVALID_OPCODE();
4768 IEM_MC_BEGIN(0, 1);
4769 IEM_MC_LOCAL(uint8_t, u8Flags);
4770 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
4771 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
4772 IEM_MC_ADVANCE_RIP_AND_FINISH();
4773 IEM_MC_END();
4774}
4775
4776
4777/**
4778 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
4779 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
4780 * prefixes. Will return on failures.
4781 * @param a_GCPtrMemOff The variable to store the offset in.
4782 */
4783#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
4784 do \
4785 { \
4786 switch (pVCpu->iem.s.enmEffAddrMode) \
4787 { \
4788 case IEMMODE_16BIT: \
4789 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
4790 break; \
4791 case IEMMODE_32BIT: \
4792 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
4793 break; \
4794 case IEMMODE_64BIT: \
4795 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
4796 break; \
4797 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4798 } \
4799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4800 } while (0)
4801
4802/**
4803 * @opcode 0xa0
4804 */
4805FNIEMOP_DEF(iemOp_mov_AL_Ob)
4806{
4807 /*
4808 * Get the offset and fend off lock prefixes.
4809 */
4810 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
4811 RTGCPTR GCPtrMemOff;
4812 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4813
4814 /*
4815 * Fetch AL.
4816 */
4817 IEM_MC_BEGIN(0,1);
4818 IEM_MC_LOCAL(uint8_t, u8Tmp);
4819 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4820 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
4821 IEM_MC_ADVANCE_RIP_AND_FINISH();
4822 IEM_MC_END();
4823}
4824
4825
4826/**
4827 * @opcode 0xa1
4828 */
4829FNIEMOP_DEF(iemOp_mov_rAX_Ov)
4830{
4831 /*
4832 * Get the offset and fend off lock prefixes.
4833 */
4834 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
4835 RTGCPTR GCPtrMemOff;
4836 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4837
4838 /*
4839 * Fetch rAX.
4840 */
4841 switch (pVCpu->iem.s.enmEffOpSize)
4842 {
4843 case IEMMODE_16BIT:
4844 IEM_MC_BEGIN(0,1);
4845 IEM_MC_LOCAL(uint16_t, u16Tmp);
4846 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4847 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
4848 IEM_MC_ADVANCE_RIP_AND_FINISH();
4849 IEM_MC_END();
4850 break;
4851
4852 case IEMMODE_32BIT:
4853 IEM_MC_BEGIN(0,1);
4854 IEM_MC_LOCAL(uint32_t, u32Tmp);
4855 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4856 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
4857 IEM_MC_ADVANCE_RIP_AND_FINISH();
4858 IEM_MC_END();
4859 break;
4860
4861 case IEMMODE_64BIT:
4862 IEM_MC_BEGIN(0,1);
4863 IEM_MC_LOCAL(uint64_t, u64Tmp);
4864 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4865 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
4866 IEM_MC_ADVANCE_RIP_AND_FINISH();
4867 IEM_MC_END();
4868 break;
4869
4870 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4871 }
4872}
4873
4874
4875/**
4876 * @opcode 0xa2
4877 */
4878FNIEMOP_DEF(iemOp_mov_Ob_AL)
4879{
4880 /*
4881 * Get the offset and fend off lock prefixes.
4882 */
4883 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
4884 RTGCPTR GCPtrMemOff;
4885 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4886
4887 /*
4888 * Store AL.
4889 */
4890 IEM_MC_BEGIN(0,1);
4891 IEM_MC_LOCAL(uint8_t, u8Tmp);
4892 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4893 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4894 IEM_MC_ADVANCE_RIP_AND_FINISH();
4895 IEM_MC_END();
4896}
4897
4898
4899/**
4900 * @opcode 0xa3
4901 */
4902FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4903{
4904 /*
4905 * Get the offset and fend off lock prefixes.
4906 */
4907 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
4908 RTGCPTR GCPtrMemOff;
4909 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4910
4911 /*
4912 * Store rAX.
4913 */
4914 switch (pVCpu->iem.s.enmEffOpSize)
4915 {
4916 case IEMMODE_16BIT:
4917 IEM_MC_BEGIN(0,1);
4918 IEM_MC_LOCAL(uint16_t, u16Tmp);
4919 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4920 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4921 IEM_MC_ADVANCE_RIP_AND_FINISH();
4922 IEM_MC_END();
4923 break;
4924
4925 case IEMMODE_32BIT:
4926 IEM_MC_BEGIN(0,1);
4927 IEM_MC_LOCAL(uint32_t, u32Tmp);
4928 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4929 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4930 IEM_MC_ADVANCE_RIP_AND_FINISH();
4931 IEM_MC_END();
4932 break;
4933
4934 case IEMMODE_64BIT:
4935 IEM_MC_BEGIN(0,1);
4936 IEM_MC_LOCAL(uint64_t, u64Tmp);
4937 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4938 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4939 IEM_MC_ADVANCE_RIP_AND_FINISH();
4940 IEM_MC_END();
4941 break;
4942
4943 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4944 }
4945}
4946
/**
 * Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one MOVS iteration: reads ValBits bits from effSeg:[rSI], writes
 * them to ES:[rDI], then advances (or, if EFLAGS.DF is set, retreats) both
 * rSI and rDI by the element size.  Addresses are taken at AddrBits width
 * and zero-extended to 64 bits.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END()
4965
4966/**
4967 * @opcode 0xa4
4968 */
4969FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
4970{
4971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4972
4973 /*
4974 * Use the C implementation if a repeat prefix is encountered.
4975 */
4976 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4977 {
4978 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
4979 switch (pVCpu->iem.s.enmEffAddrMode)
4980 {
4981 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
4982 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
4983 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
4984 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4985 }
4986 }
4987 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
4988
4989 /*
4990 * Sharing case implementation with movs[wdq] below.
4991 */
4992 switch (pVCpu->iem.s.enmEffAddrMode)
4993 {
4994 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
4995 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
4996 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
4997 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4998 }
4999}
5000
5001
5002/**
5003 * @opcode 0xa5
5004 */
5005FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
5006{
5007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5008
5009 /*
5010 * Use the C implementation if a repeat prefix is encountered.
5011 */
5012 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5013 {
5014 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
5015 switch (pVCpu->iem.s.enmEffOpSize)
5016 {
5017 case IEMMODE_16BIT:
5018 switch (pVCpu->iem.s.enmEffAddrMode)
5019 {
5020 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
5021 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
5022 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
5023 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5024 }
5025 break;
5026 case IEMMODE_32BIT:
5027 switch (pVCpu->iem.s.enmEffAddrMode)
5028 {
5029 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
5030 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
5031 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
5032 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5033 }
5034 case IEMMODE_64BIT:
5035 switch (pVCpu->iem.s.enmEffAddrMode)
5036 {
5037 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
5038 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
5039 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
5040 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5041 }
5042 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5043 }
5044 }
5045 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
5046
5047 /*
5048 * Annoying double switch here.
5049 * Using ugly macro for implementing the cases, sharing it with movsb.
5050 */
5051 switch (pVCpu->iem.s.enmEffOpSize)
5052 {
5053 case IEMMODE_16BIT:
5054 switch (pVCpu->iem.s.enmEffAddrMode)
5055 {
5056 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
5057 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
5058 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
5059 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5060 }
5061 break;
5062
5063 case IEMMODE_32BIT:
5064 switch (pVCpu->iem.s.enmEffAddrMode)
5065 {
5066 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
5067 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
5068 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
5069 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5070 }
5071 break;
5072
5073 case IEMMODE_64BIT:
5074 switch (pVCpu->iem.s.enmEffAddrMode)
5075 {
5076 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5077 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
5078 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
5079 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5080 }
5081 break;
5082 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5083 }
5084}
5085
5086#undef IEM_MOVS_CASE
5087
/**
 * Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one CMPS iteration: reads ValBits bits from effSeg:[rSI] and from
 * ES:[rDI], compares them via iemAImpl_cmp_uNN (updating EFLAGS), then
 * advances (or, if EFLAGS.DF is set, retreats) both rSI and rDI by the
 * element size.  Addresses are taken at AddrBits width and zero-extended
 * to 64 bits.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END()
5114
5115/**
5116 * @opcode 0xa6
5117 */
5118FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
5119{
5120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5121
5122 /*
5123 * Use the C implementation if a repeat prefix is encountered.
5124 */
5125 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5126 {
5127 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
5128 switch (pVCpu->iem.s.enmEffAddrMode)
5129 {
5130 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
5131 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
5132 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
5133 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5134 }
5135 }
5136 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5137 {
5138 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
5139 switch (pVCpu->iem.s.enmEffAddrMode)
5140 {
5141 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
5142 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
5143 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
5144 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5145 }
5146 }
5147 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
5148
5149 /*
5150 * Sharing case implementation with cmps[wdq] below.
5151 */
5152 switch (pVCpu->iem.s.enmEffAddrMode)
5153 {
5154 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
5155 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
5156 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
5157 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5158 }
5159}
5160
5161
5162/**
5163 * @opcode 0xa7
5164 */
5165FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
5166{
5167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5168
5169 /*
5170 * Use the C implementation if a repeat prefix is encountered.
5171 */
5172 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5173 {
5174 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
5175 switch (pVCpu->iem.s.enmEffOpSize)
5176 {
5177 case IEMMODE_16BIT:
5178 switch (pVCpu->iem.s.enmEffAddrMode)
5179 {
5180 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
5181 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
5182 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
5183 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5184 }
5185 break;
5186 case IEMMODE_32BIT:
5187 switch (pVCpu->iem.s.enmEffAddrMode)
5188 {
5189 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
5190 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
5191 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
5192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5193 }
5194 case IEMMODE_64BIT:
5195 switch (pVCpu->iem.s.enmEffAddrMode)
5196 {
5197 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
5198 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5199 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5200 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5201 }
5202 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5203 }
5204 }
5205
5206 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5207 {
5208 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
5209 switch (pVCpu->iem.s.enmEffOpSize)
5210 {
5211 case IEMMODE_16BIT:
5212 switch (pVCpu->iem.s.enmEffAddrMode)
5213 {
5214 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
5215 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
5216 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
5217 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5218 }
5219 break;
5220 case IEMMODE_32BIT:
5221 switch (pVCpu->iem.s.enmEffAddrMode)
5222 {
5223 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
5224 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
5225 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
5226 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5227 }
5228 case IEMMODE_64BIT:
5229 switch (pVCpu->iem.s.enmEffAddrMode)
5230 {
5231 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
5232 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5233 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5234 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5235 }
5236 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5237 }
5238 }
5239
5240 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
5241
5242 /*
5243 * Annoying double switch here.
5244 * Using ugly macro for implementing the cases, sharing it with cmpsb.
5245 */
5246 switch (pVCpu->iem.s.enmEffOpSize)
5247 {
5248 case IEMMODE_16BIT:
5249 switch (pVCpu->iem.s.enmEffAddrMode)
5250 {
5251 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
5252 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
5253 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
5254 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5255 }
5256 break;
5257
5258 case IEMMODE_32BIT:
5259 switch (pVCpu->iem.s.enmEffAddrMode)
5260 {
5261 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
5262 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
5263 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
5264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5265 }
5266 break;
5267
5268 case IEMMODE_64BIT:
5269 switch (pVCpu->iem.s.enmEffAddrMode)
5270 {
5271 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5272 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
5273 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
5274 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5275 }
5276 break;
5277 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5278 }
5279}
5280
5281#undef IEM_CMPS_CASE
5282
5283/**
5284 * @opcode 0xa8
5285 */
5286FNIEMOP_DEF(iemOp_test_AL_Ib)
5287{
5288 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
5289 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5290 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
5291}
5292
5293
5294/**
5295 * @opcode 0xa9
5296 */
5297FNIEMOP_DEF(iemOp_test_eAX_Iz)
5298{
5299 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
5300 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5301 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
5302}
5303
5304
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 * Expands to one STOS step for the given value/address widths: store
 * AL/AX/EAX/RAX at ES:[xDI], then step xDI by the element size in the
 * direction selected by EFL.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
5320
5321/**
5322 * @opcode 0xaa
5323 */
5324FNIEMOP_DEF(iemOp_stosb_Yb_AL)
5325{
5326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5327
5328 /*
5329 * Use the C implementation if a repeat prefix is encountered.
5330 */
5331 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5332 {
5333 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
5334 switch (pVCpu->iem.s.enmEffAddrMode)
5335 {
5336 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
5337 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
5338 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
5339 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5340 }
5341 }
5342 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
5343
5344 /*
5345 * Sharing case implementation with stos[wdq] below.
5346 */
5347 switch (pVCpu->iem.s.enmEffAddrMode)
5348 {
5349 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
5350 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
5351 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
5352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5353 }
5354}
5355
5356
5357/**
5358 * @opcode 0xab
5359 */
5360FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
5361{
5362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5363
5364 /*
5365 * Use the C implementation if a repeat prefix is encountered.
5366 */
5367 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5368 {
5369 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
5370 switch (pVCpu->iem.s.enmEffOpSize)
5371 {
5372 case IEMMODE_16BIT:
5373 switch (pVCpu->iem.s.enmEffAddrMode)
5374 {
5375 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
5376 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
5377 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
5378 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5379 }
5380 break;
5381 case IEMMODE_32BIT:
5382 switch (pVCpu->iem.s.enmEffAddrMode)
5383 {
5384 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
5385 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
5386 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
5387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5388 }
5389 case IEMMODE_64BIT:
5390 switch (pVCpu->iem.s.enmEffAddrMode)
5391 {
5392 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
5393 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
5394 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
5395 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5396 }
5397 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5398 }
5399 }
5400 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
5401
5402 /*
5403 * Annoying double switch here.
5404 * Using ugly macro for implementing the cases, sharing it with stosb.
5405 */
5406 switch (pVCpu->iem.s.enmEffOpSize)
5407 {
5408 case IEMMODE_16BIT:
5409 switch (pVCpu->iem.s.enmEffAddrMode)
5410 {
5411 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
5412 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
5413 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
5414 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5415 }
5416 break;
5417
5418 case IEMMODE_32BIT:
5419 switch (pVCpu->iem.s.enmEffAddrMode)
5420 {
5421 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
5422 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
5423 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
5424 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5425 }
5426 break;
5427
5428 case IEMMODE_64BIT:
5429 switch (pVCpu->iem.s.enmEffAddrMode)
5430 {
5431 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5432 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
5433 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
5434 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5435 }
5436 break;
5437 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5438 }
5439}
5440
5441#undef IEM_STOS_CASE
5442
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 * Expands to one LODS step for the given value/address widths: load
 * AL/AX/EAX/RAX from iEffSeg:[xSI] (default DS, honours segment overrides),
 * then step xSI by the element size in the direction selected by EFL.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
5458
5459/**
5460 * @opcode 0xac
5461 */
5462FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
5463{
5464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5465
5466 /*
5467 * Use the C implementation if a repeat prefix is encountered.
5468 */
5469 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5470 {
5471 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
5472 switch (pVCpu->iem.s.enmEffAddrMode)
5473 {
5474 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
5475 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
5476 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
5477 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5478 }
5479 }
5480 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
5481
5482 /*
5483 * Sharing case implementation with stos[wdq] below.
5484 */
5485 switch (pVCpu->iem.s.enmEffAddrMode)
5486 {
5487 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
5488 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
5489 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
5490 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5491 }
5492}
5493
5494
5495/**
5496 * @opcode 0xad
5497 */
5498FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
5499{
5500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5501
5502 /*
5503 * Use the C implementation if a repeat prefix is encountered.
5504 */
5505 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5506 {
5507 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
5508 switch (pVCpu->iem.s.enmEffOpSize)
5509 {
5510 case IEMMODE_16BIT:
5511 switch (pVCpu->iem.s.enmEffAddrMode)
5512 {
5513 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
5514 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
5515 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
5516 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5517 }
5518 break;
5519 case IEMMODE_32BIT:
5520 switch (pVCpu->iem.s.enmEffAddrMode)
5521 {
5522 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
5523 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
5524 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
5525 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5526 }
5527 case IEMMODE_64BIT:
5528 switch (pVCpu->iem.s.enmEffAddrMode)
5529 {
5530 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
5531 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
5532 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
5533 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5534 }
5535 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5536 }
5537 }
5538 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
5539
5540 /*
5541 * Annoying double switch here.
5542 * Using ugly macro for implementing the cases, sharing it with lodsb.
5543 */
5544 switch (pVCpu->iem.s.enmEffOpSize)
5545 {
5546 case IEMMODE_16BIT:
5547 switch (pVCpu->iem.s.enmEffAddrMode)
5548 {
5549 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
5550 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
5551 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
5552 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5553 }
5554 break;
5555
5556 case IEMMODE_32BIT:
5557 switch (pVCpu->iem.s.enmEffAddrMode)
5558 {
5559 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
5560 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
5561 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
5562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5563 }
5564 break;
5565
5566 case IEMMODE_64BIT:
5567 switch (pVCpu->iem.s.enmEffAddrMode)
5568 {
5569 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5570 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
5571 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
5572 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5573 }
5574 break;
5575 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5576 }
5577}
5578
5579#undef IEM_LODS_CASE
5580
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 * Expands to one SCAS step for the given value/address widths: compare
 * AL/AX/EAX/RAX against the element at ES:[xDI] via the CMP assembly helper
 * (sets EFLAGS, leaves rAX untouched), then step xDI by the element size in
 * the direction selected by EFL.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
5602
5603/**
5604 * @opcode 0xae
5605 */
5606FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5607{
5608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5609
5610 /*
5611 * Use the C implementation if a repeat prefix is encountered.
5612 */
5613 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5614 {
5615 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5616 switch (pVCpu->iem.s.enmEffAddrMode)
5617 {
5618 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5619 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5620 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5621 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5622 }
5623 }
5624 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5625 {
5626 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5627 switch (pVCpu->iem.s.enmEffAddrMode)
5628 {
5629 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5630 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5631 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5632 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5633 }
5634 }
5635 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5636
5637 /*
5638 * Sharing case implementation with stos[wdq] below.
5639 */
5640 switch (pVCpu->iem.s.enmEffAddrMode)
5641 {
5642 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5643 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5644 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5645 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5646 }
5647}
5648
5649
5650/**
5651 * @opcode 0xaf
5652 */
5653FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5654{
5655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5656
5657 /*
5658 * Use the C implementation if a repeat prefix is encountered.
5659 */
5660 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5661 {
5662 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5663 switch (pVCpu->iem.s.enmEffOpSize)
5664 {
5665 case IEMMODE_16BIT:
5666 switch (pVCpu->iem.s.enmEffAddrMode)
5667 {
5668 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5669 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5670 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5671 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5672 }
5673 break;
5674 case IEMMODE_32BIT:
5675 switch (pVCpu->iem.s.enmEffAddrMode)
5676 {
5677 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5678 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5679 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5680 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5681 }
5682 case IEMMODE_64BIT:
5683 switch (pVCpu->iem.s.enmEffAddrMode)
5684 {
5685 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
5686 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5687 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5688 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5689 }
5690 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5691 }
5692 }
5693 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5694 {
5695 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5696 switch (pVCpu->iem.s.enmEffOpSize)
5697 {
5698 case IEMMODE_16BIT:
5699 switch (pVCpu->iem.s.enmEffAddrMode)
5700 {
5701 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5702 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5703 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5704 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5705 }
5706 break;
5707 case IEMMODE_32BIT:
5708 switch (pVCpu->iem.s.enmEffAddrMode)
5709 {
5710 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5711 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5712 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5713 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5714 }
5715 case IEMMODE_64BIT:
5716 switch (pVCpu->iem.s.enmEffAddrMode)
5717 {
5718 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5719 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5720 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5721 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5722 }
5723 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5724 }
5725 }
5726 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5727
5728 /*
5729 * Annoying double switch here.
5730 * Using ugly macro for implementing the cases, sharing it with scasb.
5731 */
5732 switch (pVCpu->iem.s.enmEffOpSize)
5733 {
5734 case IEMMODE_16BIT:
5735 switch (pVCpu->iem.s.enmEffAddrMode)
5736 {
5737 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5738 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5739 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5740 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5741 }
5742 break;
5743
5744 case IEMMODE_32BIT:
5745 switch (pVCpu->iem.s.enmEffAddrMode)
5746 {
5747 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5748 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5749 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5750 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5751 }
5752 break;
5753
5754 case IEMMODE_64BIT:
5755 switch (pVCpu->iem.s.enmEffAddrMode)
5756 {
5757 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5758 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5759 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5761 }
5762 break;
5763 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5764 }
5765}
5766
5767#undef IEM_SCAS_CASE
5768
5769/**
5770 * Common 'mov r8, imm8' helper.
5771 */
5772FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
5773{
5774 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
5775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5776
5777 IEM_MC_BEGIN(0, 1);
5778 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
5779 IEM_MC_STORE_GREG_U8(iReg, u8Value);
5780 IEM_MC_ADVANCE_RIP_AND_FINISH();
5781 IEM_MC_END();
5782}
5783
5784
5785/**
5786 * @opcode 0xb0
5787 */
5788FNIEMOP_DEF(iemOp_mov_AL_Ib)
5789{
5790 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
5791 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5792}
5793
5794
5795/**
5796 * @opcode 0xb1
5797 */
5798FNIEMOP_DEF(iemOp_CL_Ib)
5799{
5800 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
5801 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5802}
5803
5804
5805/**
5806 * @opcode 0xb2
5807 */
5808FNIEMOP_DEF(iemOp_DL_Ib)
5809{
5810 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
5811 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5812}
5813
5814
5815/**
5816 * @opcode 0xb3
5817 */
5818FNIEMOP_DEF(iemOp_BL_Ib)
5819{
5820 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
5821 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5822}
5823
5824
5825/**
5826 * @opcode 0xb4
5827 */
5828FNIEMOP_DEF(iemOp_mov_AH_Ib)
5829{
5830 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
5831 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5832}
5833
5834
5835/**
5836 * @opcode 0xb5
5837 */
5838FNIEMOP_DEF(iemOp_CH_Ib)
5839{
5840 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
5841 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5842}
5843
5844
5845/**
5846 * @opcode 0xb6
5847 */
5848FNIEMOP_DEF(iemOp_DH_Ib)
5849{
5850 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
5851 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5852}
5853
5854
5855/**
5856 * @opcode 0xb7
5857 */
5858FNIEMOP_DEF(iemOp_BH_Ib)
5859{
5860 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
5861 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5862}
5863
5864
5865/**
5866 * Common 'mov regX,immX' helper.
5867 */
5868FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5869{
5870 switch (pVCpu->iem.s.enmEffOpSize)
5871 {
5872 case IEMMODE_16BIT:
5873 {
5874 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5876
5877 IEM_MC_BEGIN(0, 1);
5878 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5879 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5880 IEM_MC_ADVANCE_RIP_AND_FINISH();
5881 IEM_MC_END();
5882 break;
5883 }
5884
5885 case IEMMODE_32BIT:
5886 {
5887 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5889
5890 IEM_MC_BEGIN(0, 1);
5891 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5892 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5893 IEM_MC_ADVANCE_RIP_AND_FINISH();
5894 IEM_MC_END();
5895 break;
5896 }
5897 case IEMMODE_64BIT:
5898 {
5899 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5901
5902 IEM_MC_BEGIN(0, 1);
5903 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5904 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5905 IEM_MC_ADVANCE_RIP_AND_FINISH();
5906 IEM_MC_END();
5907 break;
5908 }
5909 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5910 }
5911}
5912
5913
5914/**
5915 * @opcode 0xb8
5916 */
5917FNIEMOP_DEF(iemOp_eAX_Iv)
5918{
5919 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
5920 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5921}
5922
5923
5924/**
5925 * @opcode 0xb9
5926 */
5927FNIEMOP_DEF(iemOp_eCX_Iv)
5928{
5929 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
5930 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5931}
5932
5933
5934/**
5935 * @opcode 0xba
5936 */
5937FNIEMOP_DEF(iemOp_eDX_Iv)
5938{
5939 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
5940 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5941}
5942
5943
5944/**
5945 * @opcode 0xbb
5946 */
5947FNIEMOP_DEF(iemOp_eBX_Iv)
5948{
5949 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
5950 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5951}
5952
5953
5954/**
5955 * @opcode 0xbc
5956 */
5957FNIEMOP_DEF(iemOp_eSP_Iv)
5958{
5959 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
5960 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5961}
5962
5963
5964/**
5965 * @opcode 0xbd
5966 */
5967FNIEMOP_DEF(iemOp_eBP_Iv)
5968{
5969 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5970 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5971}
5972
5973
5974/**
5975 * @opcode 0xbe
5976 */
5977FNIEMOP_DEF(iemOp_eSI_Iv)
5978{
5979 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
5980 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5981}
5982
5983
5984/**
5985 * @opcode 0xbf
5986 */
5987FNIEMOP_DEF(iemOp_eDI_Iv)
5988{
5989 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
5990 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5991}
5992
5993
5994/**
5995 * @opcode 0xc0
5996 */
5997FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
5998{
5999 IEMOP_HLP_MIN_186();
6000 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6001 PCIEMOPSHIFTSIZES pImpl;
6002 switch (IEM_GET_MODRM_REG_8(bRm))
6003 {
6004 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
6005 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
6006 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
6007 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
6008 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
6009 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
6010 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
6011 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6012 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6013 }
6014 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6015
6016 if (IEM_IS_MODRM_REG_MODE(bRm))
6017 {
6018 /* register */
6019 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6021 IEM_MC_BEGIN(3, 0);
6022 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6023 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6024 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6025 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6026 IEM_MC_REF_EFLAGS(pEFlags);
6027 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6028 IEM_MC_ADVANCE_RIP_AND_FINISH();
6029 IEM_MC_END();
6030 }
6031 else
6032 {
6033 /* memory */
6034 IEM_MC_BEGIN(3, 2);
6035 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6036 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6037 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6039
6040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6041 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6042 IEM_MC_ASSIGN(cShiftArg, cShift);
6043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6044 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6045 IEM_MC_FETCH_EFLAGS(EFlags);
6046 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6047
6048 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6049 IEM_MC_COMMIT_EFLAGS(EFlags);
6050 IEM_MC_ADVANCE_RIP_AND_FINISH();
6051 IEM_MC_END();
6052 }
6053}
6054
6055
6056/**
6057 * @opcode 0xc1
6058 */
6059FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
6060{
6061 IEMOP_HLP_MIN_186();
6062 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6063 PCIEMOPSHIFTSIZES pImpl;
6064 switch (IEM_GET_MODRM_REG_8(bRm))
6065 {
6066 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
6067 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
6068 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
6069 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
6070 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
6071 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
6072 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
6073 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6074 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6075 }
6076 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6077
6078 if (IEM_IS_MODRM_REG_MODE(bRm))
6079 {
6080 /* register */
6081 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6083 switch (pVCpu->iem.s.enmEffOpSize)
6084 {
6085 case IEMMODE_16BIT:
6086 IEM_MC_BEGIN(3, 0);
6087 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6088 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6089 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6090 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6091 IEM_MC_REF_EFLAGS(pEFlags);
6092 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6093 IEM_MC_ADVANCE_RIP_AND_FINISH();
6094 IEM_MC_END();
6095 break;
6096
6097 case IEMMODE_32BIT:
6098 IEM_MC_BEGIN(3, 0);
6099 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6100 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6101 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6102 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6103 IEM_MC_REF_EFLAGS(pEFlags);
6104 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6105 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6106 IEM_MC_ADVANCE_RIP_AND_FINISH();
6107 IEM_MC_END();
6108 break;
6109
6110 case IEMMODE_64BIT:
6111 IEM_MC_BEGIN(3, 0);
6112 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6113 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6114 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6115 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6116 IEM_MC_REF_EFLAGS(pEFlags);
6117 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6118 IEM_MC_ADVANCE_RIP_AND_FINISH();
6119 IEM_MC_END();
6120 break;
6121
6122 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6123 }
6124 }
6125 else
6126 {
6127 /* memory */
6128 switch (pVCpu->iem.s.enmEffOpSize)
6129 {
6130 case IEMMODE_16BIT:
6131 IEM_MC_BEGIN(3, 2);
6132 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6133 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6134 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6135 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6136
6137 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6138 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6139 IEM_MC_ASSIGN(cShiftArg, cShift);
6140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6141 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6142 IEM_MC_FETCH_EFLAGS(EFlags);
6143 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6144
6145 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6146 IEM_MC_COMMIT_EFLAGS(EFlags);
6147 IEM_MC_ADVANCE_RIP_AND_FINISH();
6148 IEM_MC_END();
6149 break;
6150
6151 case IEMMODE_32BIT:
6152 IEM_MC_BEGIN(3, 2);
6153 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6154 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6155 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6157
6158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6159 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6160 IEM_MC_ASSIGN(cShiftArg, cShift);
6161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6162 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6163 IEM_MC_FETCH_EFLAGS(EFlags);
6164 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6165
6166 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6167 IEM_MC_COMMIT_EFLAGS(EFlags);
6168 IEM_MC_ADVANCE_RIP_AND_FINISH();
6169 IEM_MC_END();
6170 break;
6171
6172 case IEMMODE_64BIT:
6173 IEM_MC_BEGIN(3, 2);
6174 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6175 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6176 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6177 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6178
6179 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6180 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6181 IEM_MC_ASSIGN(cShiftArg, cShift);
6182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6183 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6184 IEM_MC_FETCH_EFLAGS(EFlags);
6185 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6186
6187 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6188 IEM_MC_COMMIT_EFLAGS(EFlags);
6189 IEM_MC_ADVANCE_RIP_AND_FINISH();
6190 IEM_MC_END();
6191 break;
6192
6193 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6194 }
6195 }
6196}
6197
6198
/**
 * @opcode 0xc2
 *
 * Near return popping an additional u16Imm bytes of arguments off the stack.
 * Defers to the operand-size specific iemCImpl_retn_iw_NN worker.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    /* In 64-bit mode the operand size defaults to 64-bit; Intel ignores a 66h prefix here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6219
6220
/**
 * @opcode 0xc3
 *
 * Plain near return (no argument popping).  Defers to the operand-size
 * specific iemCImpl_retn_NN worker.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    /* In 64-bit mode the operand size defaults to 64-bit; Intel ignores a 66h prefix here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_retn_16);
        case IEMMODE_32BIT:
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_retn_32);
        case IEMMODE_64BIT:
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6240
6241
/**
 * @opcode 0xc4
 *
 * Either LES Gv,Mp or the 3-byte VEX prefix, depending on mode and MODRM.MOD.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode.  VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* VEX.W only has REX.W meaning in 64-bit mode. */
            if ((bVex2 & 0x80 /* VEX.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* Extract the inverted R/X/B bits from byte 1 and vvvv/L/pp from byte 2. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* The low 5 bits of byte 1 (mmmmm) select the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        Log(("VEX3: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* MOD != 3 outside 64-bit mode: plain LES. */
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
6311
6312
/**
 * @opcode 0xc5
 *
 * Either LDS Gv,Mp or the 2-byte VEX prefix, depending on mode and MODRM.MOD.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* R-bar, vvvv-bar, L and pp are all packed into the single payload byte. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

            /* The 2-byte form always implies the 0x0f opcode map. */
#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* MOD != 3 outside 64-bit mode: plain LDS. */
    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
6356
6357
/**
 * @opcode 0xc6
 *
 * Group 11: mov Eb,Ib is the only defined encoding (/0); all other /reg
 * values raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* Effective address first; the 1-byte immediate follows the addressing bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6391
6392
/**
 * @opcode 0xc7
 *
 * Group 11: mov Ev,Iz is the only defined encoding (/0); all other /reg
 * values raise \#UD.  The 64-bit form sign-extends a 32-bit immediate.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* 32-bit immediate, sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access.  The immediate size (2nd arg to CALC_RM_EFF_ADDR)
           affects RIP-relative addressing in 64-bit mode. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6480
6481
6482
6483
/**
 * @opcode 0xc8
 *
 * ENTER Iw,Ib - allocate a cbFrame-byte stack frame with u8NestingLevel
 * nesting levels.  Requires a 80186 or later.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
6497
6498
/**
 * @opcode 0xc9
 *
 * LEAVE - tear down the current stack frame.  Requires a 80186 or later.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
6510
6511
/**
 * @opcode 0xca
 *
 * Far return popping an additional u16Imm bytes of arguments off the stack.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6522
6523
/**
 * @opcode 0xcb
 *
 * Plain far return - same worker as 0xca with a zero pop count.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
6533
6534
/**
 * @opcode 0xcc
 *
 * INT3 - raises a \#BP breakpoint exception via the common int worker.
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
6544
6545
/**
 * @opcode 0xcd
 *
 * INT Ib - software interrupt with an explicit vector byte.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, IEMINT_INTN);
}
6556
6557
/**
 * @opcode 0xce
 *
 * INTO - raises \#OF via the common int worker; invalid in 64-bit mode.
 * The conditional (only trap when EFLAGS.OF is set) is handled by the
 * iemCImpl_int worker, which receives IEMINT_INTO.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Int,  /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(IEMINT,  enmInt, /*=*/ IEMINT_INTO, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, enmInt);
    /* NOTE(review): the trailing VINF_SUCCESS looks unreachable if
       IEM_MC_CALL_CIMPL_2 returns - confirm against the MC macro defs. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
6573
6574
/**
 * @opcode 0xcf
 *
 * IRET - interrupt return, deferring to the operand-size aware worker.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
6584
6585
/**
 * @opcode 0xd0
 *
 * Group 2 byte shifts/rotates with an implicit count of one:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,1.  Encoding /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the assembly worker table by the MODRM.REG field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the operand read-write, shift in place, then commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6642
6643
6644
/**
 * @opcode 0xd1
 *
 * Group 2 word/dword/qword shifts/rotates with an implicit count of one:
 * rol/ror/rcl/rcr/shl/shr/sar Ev,1.  Encoding /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the assembly worker table by the MODRM.REG field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the operand read-write, shift in place, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6778
6779
/**
 * @opcode 0xd2
 *
 * Group 2 byte shifts/rotates with the count taken from CL:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,CL.  Encoding /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the assembly worker table by the MODRM.REG field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the operand read-write, shift in place, then commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6838
6839
/**
 * @opcode 0xd3
 *
 * Group 2 word/dword/qword shifts/rotates with the count taken from CL:
 * rol/ror/rcl/rcr/shl/shr/sar Ev,CL.  Encoding /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the assembly worker table by the MODRM.REG field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the operand read-write, shift in place, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6979
/**
 * @opcode 0xd4
 *
 * AAM Ib - ASCII adjust AX after multiply.  Invalid in 64-bit mode; an
 * immediate of zero raises \#DE (divide error) before the worker is called.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
6993
6994
/**
 * @opcode 0xd5
 *
 * AAD Ib - ASCII adjust AX before division.  Invalid in 64-bit mode.
 * Unlike AAM, a zero immediate is passed straight through to the worker.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
7006
7007
/**
 * @opcode 0xd6
 *
 * SALC (undocumented) - set AL to 0xff if CF is set, 0x00 otherwise.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7026
7027
/**
 * @opcode 0xd7
 *
 * XLAT - table lookup: AL = [DS:(r/e)BX + zero-extended AL], with the
 * segment overridable via iEffSeg.  The address width follows the effective
 * address mode; the 16/32-bit fetch variants wrap the address accordingly.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7076
7077
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises the pending device-not-available/FPU exceptions first; if either
 * source register is empty the stack-underflow path is taken instead of
 * calling the arithmetic worker.
 *
 * @param   bRm         Mod R/M byte (RM selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7108
7109
7110/**
7111 * Common worker for FPU instructions working on ST0 and STn, and only affecting
7112 * flags.
7113 *
7114 * @param bRm Mod R/M byte.
7115 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7116 */
7117FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
7118{
7119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7120
7121 IEM_MC_BEGIN(3, 1);
7122 IEM_MC_LOCAL(uint16_t, u16Fsw);
7123 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7124 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7125 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7126
7127 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7128 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7129 IEM_MC_PREPARE_FPU_USAGE();
7130 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
7131 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
7132 IEM_MC_UPDATE_FSW(u16Fsw);
7133 IEM_MC_ELSE()
7134 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
7135 IEM_MC_ENDIF();
7136 IEM_MC_ADVANCE_RIP_AND_FINISH();
7137
7138 IEM_MC_END();
7139}
7140
7141
7142/**
7143 * Common worker for FPU instructions working on ST0 and STn, only affecting
7144 * flags, and popping when done.
7145 *
7146 * @param bRm Mod R/M byte.
7147 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7148 */
7149FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
7150{
7151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7152
7153 IEM_MC_BEGIN(3, 1);
7154 IEM_MC_LOCAL(uint16_t, u16Fsw);
7155 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7156 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7157 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7158
7159 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7160 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7161 IEM_MC_PREPARE_FPU_USAGE();
7162 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
7163 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
7164 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
7165 IEM_MC_ELSE()
7166 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
7167 IEM_MC_ENDIF();
7168 IEM_MC_ADVANCE_RIP_AND_FINISH();
7169
7170 IEM_MC_END();
7171}
7172
7173
/** Opcode 0xd8 11/0.
 * Dispatches to the common ST0/STn worker with the fadd assembly helper. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2.
 * Compare only - uses the no-store worker (FSW update, no register write). */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3.
 * Same comparison helper as fcom, but via the popping worker. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
7236
7237
7238/**
7239 * Common worker for FPU instructions working on ST0 and an m32r, and storing
7240 * the result in ST0.
7241 *
7242 * @param bRm Mod R/M byte.
7243 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7244 */
7245FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
7246{
7247 IEM_MC_BEGIN(3, 3);
7248 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7249 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7250 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
7251 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7252 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7253 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
7254
7255 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7257
7258 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7259 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7260 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7261
7262 IEM_MC_PREPARE_FPU_USAGE();
7263 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
7264 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
7265 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7266 IEM_MC_ELSE()
7267 IEM_MC_FPU_STACK_UNDERFLOW(0);
7268 IEM_MC_ENDIF();
7269 IEM_MC_ADVANCE_RIP_AND_FINISH();
7270
7271 IEM_MC_END();
7272}
7273
7274
/** Opcode 0xd8 !11/0.
 * Memory-operand form; dispatches to the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
7289
7290
/** Opcode 0xd8 !11/2.
 * Compares ST0 with a 32-bit real from memory; only FSW is updated, and the
 * memory operand info is recorded for FDP/FDS (hence the _MEM_OP variants). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7322
7323
/** Opcode 0xd8 !11/3.
 * Like fcom m32r (same iemAImpl_fcom_r80_by_r32 helper), but pops the stack
 * afterwards - note the _THEN_POP status/underflow variants. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7355
7356
/** Opcode 0xd8 !11/4.
 * Memory-operand form; dispatches to the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7387
7388
7389/**
7390 * @opcode 0xd8
7391 */
7392FNIEMOP_DEF(iemOp_EscF0)
7393{
7394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7395 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
7396
7397 if (IEM_IS_MODRM_REG_MODE(bRm))
7398 {
7399 switch (IEM_GET_MODRM_REG_8(bRm))
7400 {
7401 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
7402 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
7403 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
7404 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
7405 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
7406 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
7407 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
7408 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
7409 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7410 }
7411 }
7412 else
7413 {
7414 switch (IEM_GET_MODRM_REG_8(bRm))
7415 {
7416 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
7417 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
7418 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
7419 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
7420 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
7421 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
7422 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
7423 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
7424 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7425 }
7426 }
7427}
7428
7429
/** Opcode 0xd9 /0 mem32real
 * Loads a 32-bit real from memory, converts it to 80-bit and pushes it.
 * The push requires the register that will become the new top (checked via
 * ST7 relative to the current top) to be free, else a push overflow is flagged.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7461
7462
/** Opcode 0xd9 !11/2 mem32real
 * Stores ST0 to memory as a 32-bit real.  The destination is mapped for
 * writing up front; on stack underflow with the invalid-operation exception
 * masked (IM set), a negative QNaN is written instead. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the worker produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7496
7497
/** Opcode 0xd9 !11/3
 * Like fst m32r but pops the stack afterwards (_THEN_POP variants). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Masked underflow: write a negative QNaN, then pop anyway. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7531
7532
/** Opcode 0xd9 !11/4
 * Loads the FPU environment (14 or 28 bytes depending on operand size) from
 * memory; the heavy lifting is deferred to the iemCImpl_fldenv C worker. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7550
7551
7552/** Opcode 0xd9 !11/5 */
7553FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7554{
7555 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7556 IEM_MC_BEGIN(1, 1);
7557 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7558 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7561 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7562 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7563 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7564 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7565 IEM_MC_END();
7566 return VINF_SUCCESS;
7567}
7568
7569
/** Opcode 0xd9 !11/6
 * Stores the FPU environment (14 or 28 bytes) to memory via the
 * iemCImpl_fnstenv C worker.  (The mnemonic id/string say "fstenv" - the
 * wait-prefixed spelling - while the handler implements the no-wait form;
 * presumably intentional for stats naming, but worth confirming.) */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7587
7588
/** Opcode 0xd9 !11/7
 * Stores the current FPU control word to a 16-bit memory location.  Handled
 * inline (no C worker needed) since it is a plain read-and-store. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
7605
7606
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FPU no-operation: may still raise \#NM/\#MF and updates FOP/FPUIP. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
7623
7624
/** Opcode 0xd9 11/0 stN
 * Pushes a copy of STn onto the stack (FSW in the result is 0, i.e. C1
 * cleared); an empty STn yields a push underflow. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7650
7651
/** Opcode 0xd9 11/3 stN
 * Exchanges ST0 and STn.  The non-underflow path swaps the two values
 * directly (setting C1 in the result); the underflow path is delegated to the
 * iemCImpl_fxch_underflow C worker. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7680
7681
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * Copies ST0 to STn and pops the stack.  The STn == ST0 case is special-cased
 * so it degenerates to a pure pop (the de-facto 'ffreep st0' idiom). */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST0 itself: just pop, no copy needed. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST0 into STn, then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7727
7728
7729/**
7730 * Common worker for FPU instructions working on ST0 and replaces it with the
7731 * result, i.e. unary operators.
7732 *
7733 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7734 */
7735FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
7736{
7737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7738
7739 IEM_MC_BEGIN(2, 1);
7740 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7741 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7742 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7743
7744 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7745 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7746 IEM_MC_PREPARE_FPU_USAGE();
7747 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7748 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
7749 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7750 IEM_MC_ELSE()
7751 IEM_MC_FPU_STACK_UNDERFLOW(0);
7752 IEM_MC_ENDIF();
7753 IEM_MC_ADVANCE_RIP_AND_FINISH();
7754
7755 IEM_MC_END();
7756}
7757
7758
/** Opcode 0xd9 0xe0.
 * Sign change of ST0; dispatches to the common unary ST0 worker. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1.
 * Absolute value of ST0. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7773
7774
/** Opcode 0xd9 0xe4.
 * Compares ST0 against +0.0, updating only the FSW condition codes. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7799
7800
/** Opcode 0xd9 0xe5.
 * Classifies the value in ST0.  Note there is no not-empty guard here: the
 * register is referenced unconditionally (IEM_MC_REF_FPUREG) because fxam
 * must classify empty registers too. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7822
7823
7824/**
7825 * Common worker for FPU instructions pushing a constant onto the FPU stack.
7826 *
7827 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7828 */
7829FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
7830{
7831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7832
7833 IEM_MC_BEGIN(1, 1);
7834 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7835 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7836
7837 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7838 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7839 IEM_MC_PREPARE_FPU_USAGE();
7840 IEM_MC_IF_FPUREG_IS_EMPTY(7)
7841 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
7842 IEM_MC_PUSH_FPU_RESULT(FpuRes);
7843 IEM_MC_ELSE()
7844 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
7845 IEM_MC_ENDIF();
7846 IEM_MC_ADVANCE_RIP_AND_FINISH();
7847
7848 IEM_MC_END();
7849}
7850
7851
/** Opcode 0xd9 0xe8.
 * Pushes +1.0; all the fldXX constant loads below share the push-constant
 * worker and differ only in the assembly helper supplying the value. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9. Pushes log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea. Pushes log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb. Pushes pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec. Pushes log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed. Pushes ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee. Pushes +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
7904
7905
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7919
7920
7921/**
7922 * Common worker for FPU instructions working on STn and ST0, storing the result
7923 * in STn, and popping the stack unless IE, DE or ZE was raised.
7924 *
7925 * @param bRm Mod R/M byte.
7926 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7927 */
7928FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7929{
7930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7931
7932 IEM_MC_BEGIN(3, 1);
7933 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7934 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7935 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7936 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7937
7938 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7939 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7940
7941 IEM_MC_PREPARE_FPU_USAGE();
7942 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0)
7943 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7944 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm));
7945 IEM_MC_ELSE()
7946 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm));
7947 IEM_MC_ENDIF();
7948 IEM_MC_ADVANCE_RIP_AND_FINISH();
7949
7950 IEM_MC_END();
7951}
7952
7953
/** Opcode 0xd9 0xf1.
 * Computes st1 = st1 * log2(st0) and pops; the literal 1 passed as bRm makes
 * the stN_st0_pop worker target ST1. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7960
7961
7962/**
7963 * Common worker for FPU instructions working on ST0 and having two outputs, one
7964 * replacing ST0 and one pushed onto the stack.
7965 *
7966 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7967 */
7968FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
7969{
7970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7971
7972 IEM_MC_BEGIN(2, 1);
7973 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
7974 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
7975 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7976
7977 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7978 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7979 IEM_MC_PREPARE_FPU_USAGE();
7980 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7981 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
7982 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
7983 IEM_MC_ELSE()
7984 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
7985 IEM_MC_ENDIF();
7986 IEM_MC_ADVANCE_RIP_AND_FINISH();
7987
7988 IEM_MC_END();
7989}
7990
7991
/** Opcode 0xd9 0xf2.
 * Two outputs (tan result replaces ST0, 1.0 pushed) - uses the
 * replace-and-push worker. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3.
 * Result goes to ST1 and the stack is popped (bRm literal 1 selects ST1). */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4.
 * Splits ST0 into exponent (replacing ST0) and pushed significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5.
 * IEEE partial remainder of ST0 by ST1; result stays in ST0 (bRm literal 1). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
8022
8023
/** Opcode 0xd9 0xf6.
 * Decrements the FPU stack top pointer only - no register content or tag
 * changes beyond that. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8045
8046
/** Opcode 0xd9 0xf7.
 * Increments the FPU stack top pointer only - mirror image of fdecstp. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8068
8069
8070/** Opcode 0xd9 0xf8. */
8071FNIEMOP_DEF(iemOp_fprem)
8072{
8073 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
8074 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
8075}
8076
8077
8078/** Opcode 0xd9 0xf9. */
8079FNIEMOP_DEF(iemOp_fyl2xp1)
8080{
8081 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
8082 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
8083}
8084
8085
8086/** Opcode 0xd9 0xfa. */
8087FNIEMOP_DEF(iemOp_fsqrt)
8088{
8089 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
8090 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
8091}
8092
8093
8094/** Opcode 0xd9 0xfb. */
8095FNIEMOP_DEF(iemOp_fsincos)
8096{
8097 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
8098 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
8099}
8100
8101
/** Opcode 0xd9 0xfc - FRNDINT: round ST0 to integer, in place. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
8108
8109
/** Opcode 0xd9 0xfd - FSCALE: scale ST0 by ST1, result stored in ST0. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
8116
8117
/** Opcode 0xd9 0xfe - FSIN: sine of ST0, in place. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
8124
8125
/** Opcode 0xd9 0xff - FCOS: cosine of ST0, in place. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
8132
8133
/** Used by iemOp_EscF1 to dispatch the register-form encodings 0xe0..0xff
 *  (ModR/M reg fields 4-7); indexed by (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
8170
8171
/**
 * @opcode  0xd9
 *
 * Escape opcode 0xd9 decoder: fetches the ModR/M byte, records the FPU
 * opcode word, then dispatches on the reg field (and for the register
 * form, on the full ModR/M byte via g_apfnEscF1_E0toFF for 0xe0..0xff).
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* FOP is the low 11 bits of the instruction: last opcode byte + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg >= 4 implies bRm >= 0xe0, so the table index is in range. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8216
8217
/** Opcode 0xda 11/0 - FCMOVB: copy ST(i) to ST0 if CF is set; raises
 *  stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8243
8244
/** Opcode 0xda 11/1 - FCMOVE: copy ST(i) to ST0 if ZF is set; raises
 *  stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8270
8271
/** Opcode 0xda 11/2 - FCMOVBE: copy ST(i) to ST0 if CF or ZF is set;
 *  raises stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8297
8298
/** Opcode 0xda 11/3 - FCMOVU: copy ST(i) to ST0 if PF is set (unordered);
 *  raises stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8324
8325
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done.
 *
 * The assembly worker only produces an FSW value (no register result); on
 * success the FSW is committed and both operands are popped, on an empty
 * register a stack underflow is recorded and both are still popped.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8356
8357
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare ST0 with ST1, pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8364
8365
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Decodes the effective address, fetches the 32-bit signed integer operand,
 * and invokes the assembly worker; an empty ST0 yields a stack underflow.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8401
8402
/** Opcode 0xda !11/0 - FIADD: add 32-bit integer memory operand to ST0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
8409
8410
/** Opcode 0xda !11/1 - FIMUL: multiply ST0 by 32-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8417
8418
/** Opcode 0xda !11/2 - FICOM: compare ST0 with a 32-bit integer memory
 *  operand; only FSW is updated (no register result, no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8450
8451
/** Opcode 0xda !11/3 - FICOMP: same as FICOM (shares the assembly worker)
 *  but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8483
8484
/** Opcode 0xda !11/4 - FISUB: subtract 32-bit integer memory operand from ST0. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
8491
8492
/** Opcode 0xda !11/5 - FISUBR: reverse subtract, ST0 = m32i - ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
8499
8500
/** Opcode 0xda !11/6 - FIDIV: divide ST0 by 32-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
8507
8508
/** Opcode 0xda !11/7 - FIDIVR: reverse divide, ST0 = m32i / ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8515
8516
/**
 * @opcode  0xda
 *
 * Escape opcode 0xda decoder: register form dispatches FCMOVcc and FUCOMPP,
 * memory form dispatches the m32i integer arithmetic/compare instructions.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* FOP is the low 11 bits of the instruction: last opcode byte + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only the 0xe9 encoding (FUCOMPP) is valid in this group. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8558
8559
/** Opcode 0xdb !11/0 - FILD: load a 32-bit signed integer from memory and
 *  push it onto the FPU stack; overflows if ST7 (the incoming slot) is in use. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* push target must be free */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8590
8591
/** Opcode 0xdb !11/1 - FISTTP: store ST0 to memory as a 32-bit integer using
 *  truncation, then pop.  If ST0 is empty and IM is masked, the integer
 *  indefinite value (INT32_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8625
8626
/** Opcode 0xdb !11/2 - FIST: store ST0 to memory as a 32-bit integer using
 *  the current rounding mode; no pop.  If ST0 is empty and IM is masked,
 *  the integer indefinite value (INT32_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8660
8661
/** Opcode 0xdb !11/3 - FISTP: same as FIST (shares the assembly worker) but
 *  pops ST0 after the store. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8695
8696
/** Opcode 0xdb !11/5 - FLD: load an 80-bit real from memory and push it;
 *  overflows if ST7 (the incoming slot) is in use. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* push target must be free */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8727
8728
/** Opcode 0xdb !11/7 - FSTP: store ST0 to memory as an 80-bit real and pop.
 *  If ST0 is empty and IM is masked, a negative QNaN is stored instead. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Explicit alignment mask since the operand is 10 bytes, not a power of two. */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8762
8763
/** Opcode 0xdb 11/0 - FCMOVNB: copy ST(i) to ST0 if CF is clear; raises
 *  stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8789
8790
/** Opcode 0xdb 11/1 - FCMOVNE: copy ST(i) to ST0 if ZF is clear; raises
 *  stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8816
8817
/** Opcode 0xdb 11/2 - FCMOVNBE: copy ST(i) to ST0 if both CF and ZF are
 *  clear; raises stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8843
8844
/** Opcode 0xdb 11/3 - FCMOVNU: copy ST(i) to ST0 if PF is clear (not
 *  unordered); raises stack underflow if either register is empty.
 *  (The function name keeps the established double-n spelling; the 0xdb
 *  dispatch table references it as-is.) */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8870
8871
/** Opcode 0xdb 0xe0 - FNENI: 8087-only interrupt enable; emulated as a
 *  no-op (after the device-not-available check), as on later CPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8882
8883
/** Opcode 0xdb 0xe1 - FNDISI: 8087-only interrupt disable; emulated as a
 *  no-op (after the device-not-available check), as on later CPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8894
8895
/** Opcode 0xdb 0xe2 - FNCLEX: clear the FPU exception flags in FSW without
 *  checking for pending exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8909
8910
/** Opcode 0xdb 0xe3 - FNINIT: reinitialize the FPU without checking for
 *  pending exceptions; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8918
8919
/** Opcode 0xdb 0xe4 - FNSETPM: 80287-only; emulated as a no-op (after the
 *  device-not-available check), as on later CPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8930
8931
/** Opcode 0xdb 0xe5 - FRSTPM: 80287XL-only; currently raises \#UD since
 *  that is the behavior of newer CPUs (the no-op variant is disabled). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8947
8948
/** Opcode 0xdb 11/5 - FUCOMI: unordered compare ST0 with ST(i), setting
 *  EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
8955
8956
/** Opcode 0xdb 11/6 - FCOMI: ordered compare ST0 with ST(i), setting
 *  EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8963
8964
/**
 * @opcode  0xdb
 *
 * Escape opcode 0xdb decoder: register form dispatches FCMOVNcc, the
 * administrative 0xe0..0xe7 group and FUCOMI/FCOMI; memory form dispatches
 * the m32i integer load/store and m80r real load/store instructions.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* FOP is the low 11 bits of the instruction: last opcode byte + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* reg==4 covers the administrative/legacy encodings 0xe0..0xe7. */
                switch (bRm)
                {
                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6:  return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7:  return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN,  bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9016
9017
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * STn is taken from the ModR/M rm field; an empty STn or ST0 records a
 * stack underflow targeting STn.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9049
9050
/** Opcode 0xdc 11/0 - FADD: ST(i) = ST(i) + ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
9057
9058
/** Opcode 0xdc 11/1 - FMUL: ST(i) = ST(i) * ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
9065
9066
/** Opcode 0xdc 11/4 - FSUBR: ST(i) = ST0 - ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
9073
9074
/** Opcode 0xdc 11/5 - FSUB: ST(i) = ST(i) - ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
9081
9082
/** Opcode 0xdc 11/6 - FDIVR: ST(i) = ST0 / ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
9089
9090
/** Opcode 0xdc 11/7 - FDIV: ST(i) = ST(i) / ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
9097
9098
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * Raises \#NM / pending FPU exceptions before touching memory; a memory fault
 * from the operand fetch is delivered before any stack-underflow handling
 * (fetch precedes the ST0 empty check below).
 *
 * @param   bRm     Mod R/M byte (memory form).
 * @param   pfnImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        /* ST0 valid: do the operation and store the result back in ST0. */
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST0 empty: signal stack underflow (FSW/FOP/FDP updated accordingly). */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9133
9134
/** Opcode 0xdc !11/0. FADD ST0,m64real. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
9141
9142
/** Opcode 0xdc !11/1. FMUL ST0,m64real. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
9149
9150
/** Opcode 0xdc !11/2. FCOM ST0,m64real - compares ST0 with a 64-bit real from
 *  memory; only the FSW condition codes are updated, no register is written. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch before the stack check so memory faults are raised first. */
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9182
9183
/** Opcode 0xdc !11/3. FCOMP ST0,m64real - same as FCOM but pops ST0 afterwards
 *  (note the _THEN_POP variants in both the success and underflow paths). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch before the stack check so memory faults are raised first. */
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9215
9216
/** Opcode 0xdc !11/4. FSUB ST0,m64real. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
9223
9224
/** Opcode 0xdc !11/5. FSUBR ST0,m64real. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
9231
9232
/** Opcode 0xdc !11/6. FDIV ST0,m64real. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
9239
9240
/** Opcode 0xdc !11/7. FDIVR ST0,m64real. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
9247
9248
/**
 * @opcode 0xdc
 *
 * Escape opcode 0xdc dispatcher: register forms operate on ST(i),ST(0);
 * memory forms take a 64-bit real operand.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Remember the x87 opcode (low 3 opcode bits + ModR/M) for FSTENV & co. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9287
9288
/** Opcode 0xdd !11/0. FLD m64real - pushes a 64-bit real converted to 80-bit
 *  onto the FPU stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 below the current top is where the push will land; it must
       be empty, otherwise this is a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9319
9320
/** Opcode 0xdd !11/1. FISTTP m64int - store ST0 as a truncated 64-bit integer
 *  and pop. (Header previously said !11/0; the EscF5 dispatcher routes /1 here.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before the stack check so memory faults
       are raised ahead of any underflow handling. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: if the invalid-operation exception is masked (FCW.IM),
           store the integer indefinite value; either way flag underflow+pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9354
9355
/** Opcode 0xdd !11/2. FST m64real - store ST0 as 64-bit real, no pop.
 *  (Header previously said !11/0; the EscF5 dispatcher routes /2 here.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: with FCW.IM masked, store the negative QNaN (real
           indefinite); always record the stack underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9389
9390
9391
9392
/** Opcode 0xdd !11/3. FSTP m64real - store ST0 as 64-bit real and pop.
 *  (Header previously said !11/0; the EscF5 dispatcher routes /3 here.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: with FCW.IM masked, store the real indefinite QNaN;
           always record the stack underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9426
9427
/** Opcode 0xdd !11/4. FRSTOR m94/108byte - restore the whole FPU state image;
 *  deferred to a C implementation. (Header previously said !11/0; the EscF5
 *  dispatcher routes /4 here.) */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* The operand size selects the 94 vs 108 byte image layout; the C
       implementation does the parsing. */
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9445
9446
/** Opcode 0xdd !11/6. FNSAVE m94/108byte - save the whole FPU state image and
 *  reinitialize the FPU; deferred to a C implementation. (Header previously
 *  said !11/0; the EscF5 dispatcher routes /6 here.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
9465
/** Opcode 0xdd !11/7. FNSTSW m16 - store the FPU status word to memory.
 *  Being a "no-wait" form, there is deliberately no
 *  IEM_MC_MAYBE_RAISE_FPU_XCPT here. (Header previously said !11/0; the EscF5
 *  dispatcher routes /7 here.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
9489
9490
/** Opcode 0xdd 11/0. FFREE ST(i) - tag the register as empty without touching
 *  its content or the top-of-stack pointer. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9511
9512
/** Opcode 0xdd 11/2. FST ST(i) - copy ST0 into ST(i). (Header previously said
 *  11/1; the EscF5 dispatcher routes /2 here.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST0 value in a result with a zero FSW delta and store it
           into the destination register. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9536
9537
/** Opcode 0xdd 11/4. FUCOM ST0,ST(i) - unordered compare, FSW only. (Header
 *  previously said 11/3; the EscF5 dispatcher routes /4 here.) */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
9544
9545
/** Opcode 0xdd 11/5. FUCOMP ST0,ST(i) - unordered compare and pop. (Header
 *  previously said 11/4; the EscF5 dispatcher routes /5 here.) */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9552
9553
/**
 * @opcode 0xdd
 *
 * Escape opcode 0xdd dispatcher: register forms are FFREE/FST/FSTP/FUCOM(P);
 * memory forms use 64-bit real / 64-bit integer operands plus FRSTOR,
 * FNSAVE and FNSTSW m16.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Remember the x87 opcode (low 3 opcode bits + ModR/M) for FSTENV & co. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9592
9593
/** Opcode 0xde 11/0. FADDP ST(i),ST(0) - add, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
9600
9601
/** Opcode 0xde 11/1. FMULP ST(i),ST(0) - multiply, store in ST(i), then pop.
 *  (Header previously said 11/0; the EscF6 dispatcher routes /1 here.) */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
9608
9609
/** Opcode 0xde 0xd9. FCOMPP - compare ST0 with ST1, then pop both.
 *  Only valid for the single ModR/M byte 0xd9 (see the EscF6 dispatcher). */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
9616
9617
/** Opcode 0xde 11/4. FSUBRP ST(i),ST(0) - reverse subtract, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
9624
9625
/** Opcode 0xde 11/5. FSUBP ST(i),ST(0) - subtract, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
9632
9633
/** Opcode 0xde 11/6. FDIVRP ST(i),ST(0) - reverse divide, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
9640
9641
/** Opcode 0xde 11/7. FDIVP ST(i),ST(0) - divide, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9648
9649
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * Raises \#NM / pending FPU exceptions before the operand fetch; the fetch
 * precedes the ST0 empty check so memory faults win over stack underflow.
 *
 * @param   bRm      Mod R/M byte (memory form).
 * @param   pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9685
9686
/** Opcode 0xde !11/0. FIADD ST0,m16int. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
9693
9694
/** Opcode 0xde !11/1. FIMUL ST0,m16int. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9701
9702
/** Opcode 0xde !11/2. FICOM ST0,m16int - compare ST0 with a 16-bit integer
 *  from memory; only the FSW condition codes are updated. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch before the stack check so memory faults are raised first. */
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9734
9735
/** Opcode 0xde !11/3. FICOMP ST0,m16int - like FICOM but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch before the stack check so memory faults are raised first. */
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9767
9768
/** Opcode 0xde !11/4. FISUB ST0,m16int. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
9775
9776
/** Opcode 0xde !11/5. FISUBR ST0,m16int. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
9783
9784
/** Opcode 0xde !11/6. FIDIV ST0,m16int. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
9791
9792
/** Opcode 0xde !11/7. FIDIVR ST0,m16int. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9799
9800
/**
 * @opcode 0xde
 *
 * Escape opcode 0xde dispatcher: register forms are the pop variants
 * (FADDP..FDIVP, plus FCOMPP at bRm 0xd9); memory forms take a 16-bit
 * integer operand.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Remember the x87 opcode (low 3 opcode bits + ModR/M) for FSTENV & co. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    /* Only DE D9 (FCOMPP) is defined in this group. */
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9841
9842
/** Opcode 0xdf 11/0. FFREEP ST(i).
 * Undocumented instruction, assumed to work like ffree + fincstp: tag the
 * register empty and then increment the top-of-stack pointer. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9863
9864
/** Opcode 0xdf 0xe0. FNSTSW AX - store the FPU status word in AX.
 *  "No-wait" form: note the absence of IEM_MC_MAYBE_RAISE_FPU_XCPT. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9880
9881
/** Opcode 0xdf 11/5. FUCOMIP ST0,ST(i) - unordered compare setting EFLAGS, pop.
 * NOTE(review): shares iemAImpl_fcomi_r80_by_r80 with FCOMIP; FUCOMI only
 * differs in QNaN (unordered) handling - confirm the shared worker covers it. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9888
9889
/** Opcode 0xdf 11/6. FCOMIP ST0,ST(i) - compare setting EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9896
9897
/** Opcode 0xdf !11/0. FILD m16int - push a 16-bit integer converted to
 *  80-bit real. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push lands in the register 7 below the current top; it must be
       empty, otherwise this is a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9928
9929
/** Opcode 0xdf !11/1. FISTTP m16int - store ST0 as a truncated 16-bit integer
 *  and pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before the stack check so memory faults
       are raised ahead of any underflow handling. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: with FCW.IM masked store the integer indefinite value;
           always record underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9963
9964
/** Opcode 0xdf !11/2. FIST m16int - store ST0 as a rounded 16-bit integer,
 *  no pop. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: with FCW.IM masked store the integer indefinite value;
           always record the stack underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9998
9999
/** Opcode 0xdf !11/3. FISTP m16int - store ST0 as a rounded 16-bit integer
 *  and pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: with FCW.IM masked store the integer indefinite value;
           always record underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10033
10034
10035/** Opcode 0xdf !11/4.
 *
 * FBLD m80bcd - load an 80-bit packed BCD value from memory, convert it to
 * an 80-bit real and push it onto the FPU register stack.  A full stack
 * (ST(7) occupied) takes the push-overflow path instead.
 */
10036FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
10037{
10038    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");
10039
10040    IEM_MC_BEGIN(2, 3);
10041    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10042    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10043    IEM_MC_LOCAL(RTPBCD80U, d80Val);
10044    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10045    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);
10046
10047    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10048    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10049
     /* Raise pending #NM/#MF and fetch the packed BCD operand before
        touching the FPU stack. */
10050    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10051    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10052    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10053
10054    IEM_MC_PREPARE_FPU_USAGE();
     /* Push target is the register below TOP, i.e. relative index 7. */
10055    IEM_MC_IF_FPUREG_IS_EMPTY(7)
10056        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
10057        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10058    IEM_MC_ELSE()
10059        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10060    IEM_MC_ENDIF();
10061    IEM_MC_ADVANCE_RIP_AND_FINISH();
10062
10063    IEM_MC_END();
10064}
10065
10066
10067/** Opcode 0xdf !11/5.
 *
 * FILD m64i - load a signed 64-bit integer from memory, convert it to an
 * 80-bit real and push it onto the FPU register stack.  A full stack takes
 * the push-overflow path instead.
 */
10068FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
10069{
10070    IEMOP_MNEMONIC(fild_m64i, "fild m64i");
10071
10072    IEM_MC_BEGIN(2, 3);
10073    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10074    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10075    IEM_MC_LOCAL(int64_t, i64Val);
10076    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10077    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
10078
10079    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10080    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10081
     /* Raise pending #NM/#MF and fetch the source integer before touching
        the FPU stack. */
10082    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10083    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10084    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10085
10086    IEM_MC_PREPARE_FPU_USAGE();
     /* Push target is the register below TOP, i.e. relative index 7. */
10087    IEM_MC_IF_FPUREG_IS_EMPTY(7)
10088        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
10089        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10090    IEM_MC_ELSE()
10091        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10092    IEM_MC_ENDIF();
10093    IEM_MC_ADVANCE_RIP_AND_FINISH();
10094
10095    IEM_MC_END();
10096}
10097
10098
10099/** Opcode 0xdf !11/6.
 *
 * FBSTP m80bcd - store ST(0) to memory as an 80-bit packed BCD value and pop
 * the FPU register stack.  When ST(0) is empty, the underflow path writes
 * the BCD indefinite encoding only if FCW.IM masks invalid-operation
 * exceptions; the register stack is popped either way.
 */
10100FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
10101{
10102    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
10103    IEM_MC_BEGIN(3, 2);
10104    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10105    IEM_MC_LOCAL(uint16_t, u16Fsw);
10106    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10107    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
10108    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10109
10110    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10111    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10112    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10113    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10114
     /* Map the 10-byte destination for writing before modifying FPU state.
        NOTE(review): cbAlign is 7 here, unlike the plain IEM_MC_MEM_MAP used
        by the integer stores - presumably an alignment mask/limit chosen for
        the 10-byte BCD access; confirm against IEM_MC_MEM_MAP_EX semantics. */
10115    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
10116    IEM_MC_PREPARE_FPU_USAGE();
10117    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
10118        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
10119        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
10120        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10121    IEM_MC_ELSE()
         /* ST(0) empty: masked invalid-operation writes the BCD indefinite. */
10122        IEM_MC_IF_FCW_IM()
10123            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
10124            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
10125        IEM_MC_ENDIF();
10126        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10127    IEM_MC_ENDIF();
10128    IEM_MC_ADVANCE_RIP_AND_FINISH();
10129
10130    IEM_MC_END();
10131}
10132
10133
10134/** Opcode 0xdf !11/7.
 *
 * FISTP m64i - store ST(0) to memory as a signed 64-bit integer and pop the
 * FPU register stack.  When ST(0) is empty, the underflow path writes the
 * 64-bit "integer indefinite" (INT64_MIN) only if FCW.IM masks
 * invalid-operation exceptions; the register stack is popped either way.
 */
10135FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
10136{
10137    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
10138    IEM_MC_BEGIN(3, 2);
10139    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10140    IEM_MC_LOCAL(uint16_t, u16Fsw);
10141    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10142    IEM_MC_ARG(int64_t *, pi64Dst, 1);
10143    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10144
10145    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10146    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10147    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10148    IEM_MC_MAYBE_RAISE_FPU_XCPT();
10149
     /* Map the destination for writing before modifying FPU state. */
10150    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
10151    IEM_MC_PREPARE_FPU_USAGE();
10152    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
10153        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
10154        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
10155        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10156    IEM_MC_ELSE()
         /* ST(0) empty: masked invalid-operation writes the indefinite value. */
10157        IEM_MC_IF_FCW_IM()
10158            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
10159            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
10160        IEM_MC_ENDIF();
10161        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10162    IEM_MC_ENDIF();
10163    IEM_MC_ADVANCE_RIP_AND_FINISH();
10164
10165    IEM_MC_END();
10166}
10167
10168
10169/**
10170 * @opcode 0xdf
 *
 * Escape opcode 0xdf decoder.  Register-form (mod=3) encodings dispatch on
 * the /reg field to the FFREEP/FXCH/FSTP/FNSTSW AX/FUCOMIP/FCOMIP workers
 * (several of them reserved encodings emulated per Intel/AMD behavior, see
 * the per-case comments); memory-form encodings dispatch to the 16/64-bit
 * integer and packed-BCD load/store workers.
10171 */
10172FNIEMOP_DEF(iemOp_EscF7)
10173{
10174    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10175    if (IEM_IS_MODRM_REG_MODE(bRm))
10176    {
10177        switch (IEM_GET_MODRM_REG_8(bRm))
10178        {
10179            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
10180            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
10181            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10182            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10183            case 4: if (bRm == 0xe0)
                     /* Only the 0xdf 0xe0 encoding is FNSTSW AX; the rest of /4 is #UD. */
10184                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
10185                    return IEMOP_RAISE_INVALID_OPCODE();
10186            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
10187            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
10188            case 7: return IEMOP_RAISE_INVALID_OPCODE();
10189            IEM_NOT_REACHED_DEFAULT_CASE_RET();
10190        }
10191    }
10192    else
10193    {
10194        switch (IEM_GET_MODRM_REG_8(bRm))
10195        {
10196            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
10197            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
10198            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
10199            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
10200            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
10201            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
10202            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
10203            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
10204            IEM_NOT_REACHED_DEFAULT_CASE_RET();
10205        }
10206    }
10207}
10208
10209
10210/**
10211 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ Jb - decrement the counter register (CX/ECX/RCX, chosen by
 * the effective address size) and take the short relative jump when the
 * counter is non-zero AND EFLAGS.ZF is clear.  The decrement itself does
 * not modify EFLAGS.
10212 */
10213FNIEMOP_DEF(iemOp_loopne_Jb)
10214{
10215    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
10216    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10217    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10218    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10219
     /* The address-size prefix selects which counter width is used. */
10220    switch (pVCpu->iem.s.enmEffAddrMode)
10221    {
10222        case IEMMODE_16BIT:
10223            IEM_MC_BEGIN(0,0);
10224            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10225            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10226                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10227            } IEM_MC_ELSE() {
10228                IEM_MC_ADVANCE_RIP_AND_FINISH();
10229            } IEM_MC_ENDIF();
10230            IEM_MC_END();
10231            break;
10232
10233        case IEMMODE_32BIT:
10234            IEM_MC_BEGIN(0,0);
10235            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10236            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10237                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10238            } IEM_MC_ELSE() {
10239                IEM_MC_ADVANCE_RIP_AND_FINISH();
10240            } IEM_MC_ENDIF();
10241            IEM_MC_END();
10242            break;
10243
10244        case IEMMODE_64BIT:
10245            IEM_MC_BEGIN(0,0);
10246            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10247            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10248                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10249            } IEM_MC_ELSE() {
10250                IEM_MC_ADVANCE_RIP_AND_FINISH();
10251            } IEM_MC_ENDIF();
10252            IEM_MC_END();
10253            break;
10254
10255        IEM_NOT_REACHED_DEFAULT_CASE_RET();
10256    }
10257}
10258
10259
10260/**
10261 * @opcode 0xe1
 *
 * LOOPE/LOOPZ Jb - decrement the counter register (CX/ECX/RCX, chosen by
 * the effective address size) and take the short relative jump when the
 * counter is non-zero AND EFLAGS.ZF is set.  The decrement itself does not
 * modify EFLAGS.
10262 */
10263FNIEMOP_DEF(iemOp_loope_Jb)
10264{
10265    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
10266    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10267    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10268    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10269
     /* The address-size prefix selects which counter width is used. */
10270    switch (pVCpu->iem.s.enmEffAddrMode)
10271    {
10272        case IEMMODE_16BIT:
10273            IEM_MC_BEGIN(0,0);
10274            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10275            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10276                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10277            } IEM_MC_ELSE() {
10278                IEM_MC_ADVANCE_RIP_AND_FINISH();
10279            } IEM_MC_ENDIF();
10280            IEM_MC_END();
10281            break;
10282
10283        case IEMMODE_32BIT:
10284            IEM_MC_BEGIN(0,0);
10285            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10286            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10287                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10288            } IEM_MC_ELSE() {
10289                IEM_MC_ADVANCE_RIP_AND_FINISH();
10290            } IEM_MC_ENDIF();
10291            IEM_MC_END();
10292            break;
10293
10294        case IEMMODE_64BIT:
10295            IEM_MC_BEGIN(0,0);
10296            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10297            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10298                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10299            } IEM_MC_ELSE() {
10300                IEM_MC_ADVANCE_RIP_AND_FINISH();
10301            } IEM_MC_ENDIF();
10302            IEM_MC_END();
10303            break;
10304
10305        IEM_NOT_REACHED_DEFAULT_CASE_RET();
10306    }
10307}
10308
10309
10310/**
10311 * @opcode 0xe2
 *
 * LOOP Jb - decrement the counter register (CX/ECX/RCX, chosen by the
 * effective address size) and take the short relative jump while the
 * counter is non-zero.  The decrement does not modify EFLAGS.  A special
 * logging-only fast path short-circuits "LOOP $-2" busy-wait loops by
 * clearing the counter in one go (see the NB comment below).
10312 */
10313FNIEMOP_DEF(iemOp_loop_Jb)
10314{
10315    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
10316    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10317    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10318    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10319
10320    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
10321     * using the 32-bit operand size override. How can that be restarted? See
10322     * weird pseudo code in intel manual. */
10323
10324    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
10325     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
10326     * the loop causes guest crashes, but when logging it's nice to skip a few million
10327     * lines of useless output. */
10328#if defined(LOG_ENABLED)
     /* Fast path only when verbose logging is on and the jump targets the
        instruction itself (i8Imm == -instruction-length): zero the counter
        and fall through, instead of iterating the loop to completion. */
10329    if ((LogIs3Enabled() || LogIs4Enabled()) && (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm))
10330        switch (pVCpu->iem.s.enmEffAddrMode)
10331        {
10332            case IEMMODE_16BIT:
10333                IEM_MC_BEGIN(0,0);
10334                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
10335                IEM_MC_ADVANCE_RIP_AND_FINISH();
10336                IEM_MC_END();
10337                break;
10338
10339            case IEMMODE_32BIT:
10340                IEM_MC_BEGIN(0,0);
10341                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
10342                IEM_MC_ADVANCE_RIP_AND_FINISH();
10343                IEM_MC_END();
10344                break;
10345
10346            case IEMMODE_64BIT:
10347                IEM_MC_BEGIN(0,0);
10348                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
10349                IEM_MC_ADVANCE_RIP_AND_FINISH();
10350                IEM_MC_END();
10351                break;
10352
10353            IEM_NOT_REACHED_DEFAULT_CASE_RET();
10354        }
10355#endif
10356
     /* Normal path: decrement the counter and branch while it is non-zero. */
10357    switch (pVCpu->iem.s.enmEffAddrMode)
10358    {
10359        case IEMMODE_16BIT:
10360            IEM_MC_BEGIN(0,0);
10361
10362            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10363            IEM_MC_IF_CX_IS_NZ() {
10364                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10365            } IEM_MC_ELSE() {
10366                IEM_MC_ADVANCE_RIP_AND_FINISH();
10367            } IEM_MC_ENDIF();
10368            IEM_MC_END();
10369            break;
10370
10371        case IEMMODE_32BIT:
10372            IEM_MC_BEGIN(0,0);
10373            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10374            IEM_MC_IF_ECX_IS_NZ() {
10375                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10376            } IEM_MC_ELSE() {
10377                IEM_MC_ADVANCE_RIP_AND_FINISH();
10378            } IEM_MC_ENDIF();
10379            IEM_MC_END();
10380            break;
10381
10382        case IEMMODE_64BIT:
10383            IEM_MC_BEGIN(0,0);
10384            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10385            IEM_MC_IF_RCX_IS_NZ() {
10386                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10387            } IEM_MC_ELSE() {
10388                IEM_MC_ADVANCE_RIP_AND_FINISH();
10389            } IEM_MC_ENDIF();
10390            IEM_MC_END();
10391            break;
10392
10393        IEM_NOT_REACHED_DEFAULT_CASE_RET();
10394    }
10395}
10396
10397
10398/**
10399 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb - take the short relative jump when the counter
 * register (CX/ECX/RCX, chosen by the effective address size) is zero.
 * The counter is only tested, never modified, and EFLAGS is untouched.
10400 */
10401FNIEMOP_DEF(iemOp_jecxz_Jb)
10402{
10403    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
10404    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10405    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10406    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10407
10408    switch (pVCpu->iem.s.enmEffAddrMode)
10409    {
10410        case IEMMODE_16BIT:
10411            IEM_MC_BEGIN(0,0);
             /* Note the inverted sense: non-zero falls through, zero jumps. */
10412            IEM_MC_IF_CX_IS_NZ() {
10413                IEM_MC_ADVANCE_RIP_AND_FINISH();
10414            } IEM_MC_ELSE() {
10415                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10416            } IEM_MC_ENDIF();
10417            IEM_MC_END();
10418            break;
10419
10420        case IEMMODE_32BIT:
10421            IEM_MC_BEGIN(0,0);
10422            IEM_MC_IF_ECX_IS_NZ() {
10423                IEM_MC_ADVANCE_RIP_AND_FINISH();
10424            } IEM_MC_ELSE() {
10425                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10426            } IEM_MC_ENDIF();
10427            IEM_MC_END();
10428            break;
10429
10430        case IEMMODE_64BIT:
10431            IEM_MC_BEGIN(0,0);
10432            IEM_MC_IF_RCX_IS_NZ() {
10433                IEM_MC_ADVANCE_RIP_AND_FINISH();
10434            } IEM_MC_ELSE() {
10435                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10436            } IEM_MC_ENDIF();
10437            IEM_MC_END();
10438            break;
10439
10440        IEM_NOT_REACHED_DEFAULT_CASE_RET();
10441    }
10442}
10443
10444
10445/** Opcode 0xe4
 * IN AL,Ib - read one byte from the immediate 8-bit I/O port into AL.
 * Deferred to the iemCImpl_in C implementation (fImm=true, 1 byte). */
10446FNIEMOP_DEF(iemOp_in_AL_Ib)
10447{
10448    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
10449    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10450    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10451    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, 1);
10452}
10453
10454
10455/** Opcode 0xe5
 * IN eAX,Ib - read a word/dword from the immediate 8-bit I/O port into
 * AX/EAX; the access width (2 or 4) follows the effective operand size. */
10456FNIEMOP_DEF(iemOp_in_eAX_Ib)
10457{
10458    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
10459    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10460    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10461    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
10462}
10463
10464
10465/** Opcode 0xe6
 * OUT Ib,AL - write AL to the immediate 8-bit I/O port.
 * Deferred to the iemCImpl_out C implementation (fImm=true, 1 byte). */
10466FNIEMOP_DEF(iemOp_out_Ib_AL)
10467{
10468    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
10469    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10470    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10471    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, 1);
10472}
10473
10474
10475/** Opcode 0xe7
 * OUT Ib,eAX - write AX/EAX to the immediate 8-bit I/O port; the access
 * width (2 or 4) follows the effective operand size. */
10476FNIEMOP_DEF(iemOp_out_Ib_eAX)
10477{
10478    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
10479    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10480    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10481    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
10482}
10483
10484
10485/**
10486 * @opcode 0xe8
 *
 * CALL Jv - near relative call with a 16/32-bit immediate displacement
 * (sign-extended 32-bit in 64-bit mode).  The operand size defaults to
 * 64-bit in long mode and, per the helper, Intel CPUs ignore the operand
 * size prefix there.  Pushing the return address and updating RIP is done
 * by the iemCImpl_call_rel_* C implementations.
10487 */
10488FNIEMOP_DEF(iemOp_call_Jv)
10489{
10490    IEMOP_MNEMONIC(call_Jv, "call Jv");
10491    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
10492    switch (pVCpu->iem.s.enmEffOpSize)
10493    {
10494        case IEMMODE_16BIT:
10495        {
10496            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10497            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
10498        }
10499
10500        case IEMMODE_32BIT:
10501        {
10502            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10503            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
10504        }
10505
10506        case IEMMODE_64BIT:
10507        {
             /* 64-bit mode encodes a 32-bit displacement, sign-extended. */
10508            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10509            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
10510        }
10511
10512        IEM_NOT_REACHED_DEFAULT_CASE_RET();
10513    }
10514}
10515
10516
10517/**
10518 * @opcode 0xe9
 *
 * JMP Jv - near relative jump with a 16/32-bit immediate displacement.
 * 64-bit mode shares the 32-bit immediate path (sign-extended
 * displacement); the operand size defaults to 64-bit in long mode and,
 * per the helper, Intel CPUs ignore the operand size prefix there.
10519 */
10520FNIEMOP_DEF(iemOp_jmp_Jv)
10521{
10522    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
10523    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
10524    switch (pVCpu->iem.s.enmEffOpSize)
10525    {
10526        case IEMMODE_16BIT:
10527        {
10528            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
10529            IEM_MC_BEGIN(0, 0);
10530            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
10531            IEM_MC_END();
10532            return VINF_SUCCESS;
10533        }
10534
         /* 64-bit and 32-bit operand sizes both decode a 32-bit displacement. */
10535        case IEMMODE_64BIT:
10536        case IEMMODE_32BIT:
10537        {
10538            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
10539            IEM_MC_BEGIN(0, 0);
10540            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
10541            IEM_MC_END();
10542            return VINF_SUCCESS;
10543        }
10544
10545        IEM_NOT_REACHED_DEFAULT_CASE_RET();
10546    }
10547}
10548
10549
10550/**
10551 * @opcode 0xea
 *
 * JMP Ap - direct far jump with an immediate 16:16 or 16:32 far pointer.
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT).  The selector:offset pair
 * is decoded here and handed to the iemCImpl_FarJmp C implementation.
10552 */
10553FNIEMOP_DEF(iemOp_jmp_Ap)
10554{
10555    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
10556    IEMOP_HLP_NO_64BIT();
10557
10558    /* Decode the far pointer address and pass it on to the far call C implementation. */
10559    uint32_t offSeg;
10560    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
10561        IEM_OPCODE_GET_NEXT_U32(&offSeg);
10562    else
10563        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
     /* The selector word follows the offset in the instruction stream. */
10564    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
10565    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10566    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
10567}
10568
10569
10570/**
10571 * @opcode 0xeb
 *
 * JMP Jb - near short jump with a sign-extended 8-bit displacement.
10572 */
10573FNIEMOP_DEF(iemOp_jmp_Jb)
10574{
10575    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
10576    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10577    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10578    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
10579
10580    IEM_MC_BEGIN(0, 0);
10581    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10582    IEM_MC_END();
10583    return VINF_SUCCESS;
10584}
10585
10586
10587/** Opcode 0xec
 * IN AL,DX - read one byte from the I/O port addressed by DX into AL. */
10588FNIEMOP_DEF(iemOp_in_AL_DX)
10589{
10590    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
10591    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10592    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
10593}
10594
10595
10595/** Opcode 0xed
 * IN eAX,DX - read a word/dword from the I/O port addressed by DX into
 * AX/EAX; the access width (2 or 4) follows the effective operand size. */
10596FNIEMOP_DEF(iemOp_in_eAX_DX)
10597{
10598    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
10599    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10600    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
10601}
10603
10604
10605/** Opcode 0xee
 * OUT DX,AL - write AL to the I/O port addressed by DX. */
10606FNIEMOP_DEF(iemOp_out_DX_AL)
10607{
10608    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
10609    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10610    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
10611}
10612
10613
10614/** Opcode 0xef
 * OUT DX,eAX - write AX/EAX to the I/O port addressed by DX; the access
 * width (2 or 4) follows the effective operand size. */
10615FNIEMOP_DEF(iemOp_out_DX_eAX)
10616{
10617    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
10618    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10619    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
10620}
10621
10622
10623/**
10624 * @opcode 0xf0
 *
 * LOCK prefix - records IEM_OP_PRF_LOCK in the prefix state (unless the
 * fDisregardLock debug option is set) and re-dispatches on the next opcode
 * byte through the one-byte opcode table.
10625 */
10626FNIEMOP_DEF(iemOp_lock)
10627{
10628    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
10629    if (!pVCpu->iem.s.fDisregardLock)
10630        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
10631
10632    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10633    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10634}
10635
10636
10637/**
10638 * @opcode 0xf1
 *
 * INT1/ICEBP - raises a #DB via the common software-interrupt C
 * implementation, tagged IEMINT_INT1.  Requires a 386 or later.
10639 */
10640FNIEMOP_DEF(iemOp_int1)
10641{
10642    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
10643    /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
10644     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
10645     * LOADALL memo. Needs some testing. */
10646    IEMOP_HLP_MIN_386();
10647    /** @todo testcase! */
10648    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
10649}
10650
10651
10652/**
10653 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix - records IEM_OP_PRF_REPNZ (clearing any earlier
 * REPZ), selects prefix group index 3 for the 4-entry opcode tables, and
 * re-dispatches on the next opcode byte.
10654 */
10655FNIEMOP_DEF(iemOp_repne)
10656{
10657    /* This overrides any previous REPE prefix. */
10658    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
10659    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
10660    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
10661
10662    /* For the 4 entry opcode tables, REPNZ overrides any previous
10663       REPZ and operand size prefixes. */
10664    pVCpu->iem.s.idxPrefix = 3;
10665
10666    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10667    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10668}
10669
10670
10671/**
10672 * @opcode 0xf3
 *
 * REP/REPE/REPZ prefix - records IEM_OP_PRF_REPZ (clearing any earlier
 * REPNZ), selects prefix group index 2 for the 4-entry opcode tables, and
 * re-dispatches on the next opcode byte.
10673 */
10674FNIEMOP_DEF(iemOp_repe)
10675{
10676    /* This overrides any previous REPNE prefix. */
10677    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
10678    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
10679    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
10680
10681    /* For the 4 entry opcode tables, REPZ overrides any previous
10682       REPNZ and operand size prefixes. */
10683    pVCpu->iem.s.idxPrefix = 2;
10684
10685    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10686    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10687}
10688
10689
10690/**
10691 * @opcode 0xf4
 *
 * HLT - deferred to the iemCImpl_hlt C implementation.
10692 */
10693FNIEMOP_DEF(iemOp_hlt)
10694{
10695    IEMOP_MNEMONIC(hlt, "hlt");
10696    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10697    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
10698}
10699
10700
10701/**
10702 * @opcode 0xf5
 *
 * CMC - complement (flip) EFLAGS.CF; no other flags are touched.
10703 */
10704FNIEMOP_DEF(iemOp_cmc)
10705{
10706    IEMOP_MNEMONIC(cmc, "cmc");
10707    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10708    IEM_MC_BEGIN(0, 0);
10709    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
10710    IEM_MC_ADVANCE_RIP_AND_FINISH();
10711    IEM_MC_END();
10712}
10713
10714
10715/**
10716 * Common implementation of 'inc/dec/not/neg Eb'.
10717 *
 * Reads, modifies and writes back an 8-bit register or memory operand via
 * the supplied unary worker.  The memory form maps the operand read/write
 * and honours the LOCK prefix by selecting the locked worker variant.
 *
10718 * @param   bRm     The RM byte.
10719 * @param   pImpl   The instruction implementation.
10720 */
10721FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10722{
10723    if (IEM_IS_MODRM_REG_MODE(bRm))
10724    {
10725        /* register access */
10726        IEM_MC_BEGIN(2, 0);
10727        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10728        IEM_MC_ARG(uint32_t *, pEFlags, 1);
10729        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10730        IEM_MC_REF_EFLAGS(pEFlags);
10731        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10732        IEM_MC_ADVANCE_RIP_AND_FINISH();
10733        IEM_MC_END();
10734    }
10735    else
10736    {
10737        /* memory access. */
10738        IEM_MC_BEGIN(2, 2);
10739        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10740        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10741        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10742
10743        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10744        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10745        IEM_MC_FETCH_EFLAGS(EFlags);
         /* LOCK selects the atomic worker variant; no #UD here since all
            four group members accept LOCK with a memory operand. */
10746        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10747            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10748        else
10749            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
10750
10751        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10752        IEM_MC_COMMIT_EFLAGS(EFlags);
10753        IEM_MC_ADVANCE_RIP_AND_FINISH();
10754        IEM_MC_END();
10755    }
10756}
10757
10758
10759/**
10760 * Common implementation of 'inc/dec/not/neg Ev'.
10761 *
 * The register form is delegated to iemOpCommonUnaryGReg; the memory form
 * is expanded here per effective operand size (16/32/64-bit), mapping the
 * operand read/write and honouring the LOCK prefix by selecting the locked
 * worker variant.
 *
10762 * @param   bRm     The RM byte.
10763 * @param   pImpl   The instruction implementation.
10764 */
10765FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10766{
10767    /* Registers are handled by a common worker. */
10768    if (IEM_IS_MODRM_REG_MODE(bRm))
10769        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, IEM_GET_MODRM_RM(pVCpu, bRm));
10770
10771    /* Memory we do here. */
10772    switch (pVCpu->iem.s.enmEffOpSize)
10773    {
10774        case IEMMODE_16BIT:
10775            IEM_MC_BEGIN(2, 2);
10776            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10777            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10778            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10779
10780            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10781            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10782            IEM_MC_FETCH_EFLAGS(EFlags);
             /* LOCK selects the atomic worker variant. */
10783            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10784                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
10785            else
10786                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
10787
10788            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10789            IEM_MC_COMMIT_EFLAGS(EFlags);
10790            IEM_MC_ADVANCE_RIP_AND_FINISH();
10791            IEM_MC_END();
10792            break;
10793
10794        case IEMMODE_32BIT:
10795            IEM_MC_BEGIN(2, 2);
10796            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10797            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10798            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10799
10800            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10801            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10802            IEM_MC_FETCH_EFLAGS(EFlags);
10803            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10804                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
10805            else
10806                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
10807
10808            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10809            IEM_MC_COMMIT_EFLAGS(EFlags);
10810            IEM_MC_ADVANCE_RIP_AND_FINISH();
10811            IEM_MC_END();
10812            break;
10813
10814        case IEMMODE_64BIT:
10815            IEM_MC_BEGIN(2, 2);
10816            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10817            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10818            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10819
10820            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10821            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10822            IEM_MC_FETCH_EFLAGS(EFlags);
10823            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10824                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
10825            else
10826                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
10827
10828            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10829            IEM_MC_COMMIT_EFLAGS(EFlags);
10830            IEM_MC_ADVANCE_RIP_AND_FINISH();
10831            IEM_MC_END();
10832            break;
10833
10834        IEM_NOT_REACHED_DEFAULT_CASE_RET();
10835    }
10836}
10837
10838
10839/** Opcode 0xf6 /0.
 *
 * TEST Eb,Ib - AND an 8-bit register/memory operand with an immediate byte
 * and set EFLAGS accordingly; the operand itself is not written back (the
 * memory form maps the operand read-only).  AF is left undefined.
 */
10840FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
10841{
10842    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
10843    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10844
10845    if (IEM_IS_MODRM_REG_MODE(bRm))
10846    {
10847        /* register access */
10848        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10849        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10850
10851        IEM_MC_BEGIN(3, 0);
10852        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10853        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
10854        IEM_MC_ARG(uint32_t *, pEFlags, 2);
10855        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10856        IEM_MC_REF_EFLAGS(pEFlags);
10857        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
10858        IEM_MC_ADVANCE_RIP_AND_FINISH();
10859        IEM_MC_END();
10860    }
10861    else
10862    {
10863        /* memory access. */
10864        IEM_MC_BEGIN(3, 2);
10865        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10866        IEM_MC_ARG(uint8_t, u8Src, 1);
10867        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10868        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10869
         /* The effective address calc is told 1 immediate byte follows the
            ModR/M bytes (displacement vs. immediate ordering). */
10870        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10871        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10872        IEM_MC_ASSIGN(u8Src, u8Imm);
10873        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
         /* Read-only mapping: TEST never writes its operand back. */
10874        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10875        IEM_MC_FETCH_EFLAGS(EFlags);
10876        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
10877
10878        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
10879        IEM_MC_COMMIT_EFLAGS(EFlags);
10880        IEM_MC_ADVANCE_RIP_AND_FINISH();
10881        IEM_MC_END();
10882    }
10883}
10884
10885
10886/** Opcode 0xf7 /0.
 *
 * TEST Ev,Iv - AND a 16/32/64-bit register/memory operand with an immediate
 * (sign-extended 32-bit immediate in the 64-bit case) and set EFLAGS; the
 * operand is not written back (memory form is mapped read-only).  AF is
 * left undefined.
 */
10887FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
10888{
10889    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
10890    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10891
10892    if (IEM_IS_MODRM_REG_MODE(bRm))
10893    {
10894        /* register access */
10895        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10896        switch (pVCpu->iem.s.enmEffOpSize)
10897        {
10898            case IEMMODE_16BIT:
10899            {
10900                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10901                IEM_MC_BEGIN(3, 0);
10902                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10903                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
10904                IEM_MC_ARG(uint32_t *, pEFlags, 2);
10905                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10906                IEM_MC_REF_EFLAGS(pEFlags);
10907                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
10908                IEM_MC_ADVANCE_RIP_AND_FINISH();
10909                IEM_MC_END();
10910                break;
10911            }
10912
10913            case IEMMODE_32BIT:
10914            {
10915                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10916                IEM_MC_BEGIN(3, 0);
10917                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10918                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
10919                IEM_MC_ARG(uint32_t *, pEFlags, 2);
10920                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10921                IEM_MC_REF_EFLAGS(pEFlags);
10922                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
10923                /* No clearing the high dword here - test doesn't write back the result. */
10924                IEM_MC_ADVANCE_RIP_AND_FINISH();
10925                IEM_MC_END();
10926                break;
10927            }
10928
10929            case IEMMODE_64BIT:
10930            {
                 /* 64-bit TEST uses a sign-extended 32-bit immediate. */
10931                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10932                IEM_MC_BEGIN(3, 0);
10933                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10934                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
10935                IEM_MC_ARG(uint32_t *, pEFlags, 2);
10936                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10937                IEM_MC_REF_EFLAGS(pEFlags);
10938                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
10939                IEM_MC_ADVANCE_RIP_AND_FINISH();
10940                IEM_MC_END();
10941                break;
10942            }
10943
10944            IEM_NOT_REACHED_DEFAULT_CASE_RET();
10945        }
10946    }
10947    else
10948    {
10949        /* memory access. */
10950        switch (pVCpu->iem.s.enmEffOpSize)
10951        {
10952            case IEMMODE_16BIT:
10953            {
10954                IEM_MC_BEGIN(3, 2);
10955                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10956                IEM_MC_ARG(uint16_t, u16Src, 1);
10957                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10958                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10959
                 /* Effective address calc is told 2 immediate bytes follow. */
10960                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
10961                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10962                IEM_MC_ASSIGN(u16Src, u16Imm);
10963                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                 /* Read-only mapping: TEST never writes its operand back. */
10964                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10965                IEM_MC_FETCH_EFLAGS(EFlags);
10966                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
10967
10968                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
10969                IEM_MC_COMMIT_EFLAGS(EFlags);
10970                IEM_MC_ADVANCE_RIP_AND_FINISH();
10971                IEM_MC_END();
10972                break;
10973            }
10974
10975            case IEMMODE_32BIT:
10976            {
10977                IEM_MC_BEGIN(3, 2);
10978                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10979                IEM_MC_ARG(uint32_t, u32Src, 1);
10980                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10981                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10982
10983                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10984                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10985                IEM_MC_ASSIGN(u32Src, u32Imm);
10986                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10987                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10988                IEM_MC_FETCH_EFLAGS(EFlags);
10989                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
10990
10991                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
10992                IEM_MC_COMMIT_EFLAGS(EFlags);
10993                IEM_MC_ADVANCE_RIP_AND_FINISH();
10994                IEM_MC_END();
10995                break;
10996            }
10997
10998            case IEMMODE_64BIT:
10999            {
11000                IEM_MC_BEGIN(3, 2);
11001                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11002                IEM_MC_ARG(uint64_t, u64Src, 1);
11003                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11004                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11005
                 /* 4 immediate bytes follow; sign-extended to 64 bits below. */
11006                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
11007                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
11008                IEM_MC_ASSIGN(u64Src, u64Imm);
11009                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11010                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11011                IEM_MC_FETCH_EFLAGS(EFlags);
11012                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
11013
11014                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
11015                IEM_MC_COMMIT_EFLAGS(EFlags);
11016                IEM_MC_ADVANCE_RIP_AND_FINISH();
11017                IEM_MC_END();
11018                break;
11019            }
11020
11021            IEM_NOT_REACHED_DEFAULT_CASE_RET();
11022        }
11023    }
11024}
11025
11026
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the byte-sized MUL, IMUL, DIV and IDIV forms.  The
 * explicit 8-bit operand comes from the r/m encoding, while AX serves as the
 * implicit combined input/output register (hence the 16-bit pu16AX ref).
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The arithmetic worker to invoke; returns zero on success
 *                  and non-zero to request a \#DE (divide error) exception.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero status from the worker means the operation faulted
           (e.g. division by zero); raise #DE instead of advancing RIP. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        /* Effective address first, then decoding completion, then the fetch
           (same statement order as the other memory-operand workers). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
11077
11078
11079/** Opcode 0xf7 /4, /5, /6 and /7. */
11080FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
11081{
11082 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11083
11084 if (IEM_IS_MODRM_REG_MODE(bRm))
11085 {
11086 /* register access */
11087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11088 switch (pVCpu->iem.s.enmEffOpSize)
11089 {
11090 case IEMMODE_16BIT:
11091 {
11092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11093 IEM_MC_BEGIN(4, 1);
11094 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11095 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11096 IEM_MC_ARG(uint16_t, u16Value, 2);
11097 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11098 IEM_MC_LOCAL(int32_t, rc);
11099
11100 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11101 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11102 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11103 IEM_MC_REF_EFLAGS(pEFlags);
11104 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11105 IEM_MC_IF_LOCAL_IS_Z(rc) {
11106 IEM_MC_ADVANCE_RIP_AND_FINISH();
11107 } IEM_MC_ELSE() {
11108 IEM_MC_RAISE_DIVIDE_ERROR();
11109 } IEM_MC_ENDIF();
11110
11111 IEM_MC_END();
11112 break;
11113 }
11114
11115 case IEMMODE_32BIT:
11116 {
11117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11118 IEM_MC_BEGIN(4, 1);
11119 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11120 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11121 IEM_MC_ARG(uint32_t, u32Value, 2);
11122 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11123 IEM_MC_LOCAL(int32_t, rc);
11124
11125 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11126 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11127 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11128 IEM_MC_REF_EFLAGS(pEFlags);
11129 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11130 IEM_MC_IF_LOCAL_IS_Z(rc) {
11131 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11132 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11133 IEM_MC_ADVANCE_RIP_AND_FINISH();
11134 } IEM_MC_ELSE() {
11135 IEM_MC_RAISE_DIVIDE_ERROR();
11136 } IEM_MC_ENDIF();
11137
11138 IEM_MC_END();
11139 break;
11140 }
11141
11142 case IEMMODE_64BIT:
11143 {
11144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11145 IEM_MC_BEGIN(4, 1);
11146 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11147 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11148 IEM_MC_ARG(uint64_t, u64Value, 2);
11149 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11150 IEM_MC_LOCAL(int32_t, rc);
11151
11152 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11153 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11154 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11155 IEM_MC_REF_EFLAGS(pEFlags);
11156 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11157 IEM_MC_IF_LOCAL_IS_Z(rc) {
11158 IEM_MC_ADVANCE_RIP_AND_FINISH();
11159 } IEM_MC_ELSE() {
11160 IEM_MC_RAISE_DIVIDE_ERROR();
11161 } IEM_MC_ENDIF();
11162
11163 IEM_MC_END();
11164 break;
11165 }
11166
11167 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11168 }
11169 }
11170 else
11171 {
11172 /* memory access. */
11173 switch (pVCpu->iem.s.enmEffOpSize)
11174 {
11175 case IEMMODE_16BIT:
11176 {
11177 IEM_MC_BEGIN(4, 2);
11178 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11179 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11180 IEM_MC_ARG(uint16_t, u16Value, 2);
11181 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11183 IEM_MC_LOCAL(int32_t, rc);
11184
11185 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11187 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11188 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11189 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11190 IEM_MC_REF_EFLAGS(pEFlags);
11191 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11192 IEM_MC_IF_LOCAL_IS_Z(rc) {
11193 IEM_MC_ADVANCE_RIP_AND_FINISH();
11194 } IEM_MC_ELSE() {
11195 IEM_MC_RAISE_DIVIDE_ERROR();
11196 } IEM_MC_ENDIF();
11197
11198 IEM_MC_END();
11199 break;
11200 }
11201
11202 case IEMMODE_32BIT:
11203 {
11204 IEM_MC_BEGIN(4, 2);
11205 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11206 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11207 IEM_MC_ARG(uint32_t, u32Value, 2);
11208 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11210 IEM_MC_LOCAL(int32_t, rc);
11211
11212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11214 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11215 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11216 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11217 IEM_MC_REF_EFLAGS(pEFlags);
11218 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11219 IEM_MC_IF_LOCAL_IS_Z(rc) {
11220 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11221 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11222 IEM_MC_ADVANCE_RIP_AND_FINISH();
11223 } IEM_MC_ELSE() {
11224 IEM_MC_RAISE_DIVIDE_ERROR();
11225 } IEM_MC_ENDIF();
11226
11227 IEM_MC_END();
11228 break;
11229 }
11230
11231 case IEMMODE_64BIT:
11232 {
11233 IEM_MC_BEGIN(4, 2);
11234 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11235 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11236 IEM_MC_ARG(uint64_t, u64Value, 2);
11237 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11238 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11239 IEM_MC_LOCAL(int32_t, rc);
11240
11241 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11243 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11244 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11245 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11246 IEM_MC_REF_EFLAGS(pEFlags);
11247 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11248 IEM_MC_IF_LOCAL_IS_Z(rc) {
11249 IEM_MC_ADVANCE_RIP_AND_FINISH();
11250 } IEM_MC_ELSE() {
11251 IEM_MC_RAISE_DIVIDE_ERROR();
11252 } IEM_MC_ENDIF();
11253
11254 IEM_MC_END();
11255 break;
11256 }
11257
11258 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11259 }
11260 }
11261}
11262
/**
 * @opcode 0xf6
 *
 * Group 3, byte operand: dispatches on the ModR/M reg field to TEST, NOT,
 * NEG, MUL, IMUL, DIV or IDIV on Eb.  /1 is an invalid encoding here.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11301
11302
/**
 * @opcode 0xf7
 *
 * Group 3, word/dword/qword operand: dispatches on the ModR/M reg field to
 * TEST, NOT, NEG, MUL, IMUL, DIV or IDIV on Ev.  /1 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11341
11342
/**
 * @opcode 0xf8
 *
 * CLC - clear the carry flag.  Only CF is touched.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11355
11356
/**
 * @opcode 0xf9
 *
 * STC - set the carry flag.  Only CF is touched.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11369
11370
/**
 * @opcode 0xfa
 *
 * CLI - clear the interrupt flag.  Deferred to a C implementation
 * (iemCImpl_cli), presumably because of IOPL/privilege handling — see there.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
11380
11381
/**
 * @opcode 0xfb
 *
 * STI - set the interrupt flag.  Deferred to a C implementation
 * (iemCImpl_sti); see there for the details.  (The doxygen \@opcode header
 * was missing here, unlike the neighbouring handlers.)
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11388
11389
/**
 * @opcode 0xfc
 *
 * CLD - clear the direction flag.  Only DF is touched.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11402
11403
/**
 * @opcode 0xfd
 *
 * STD - set the direction flag.  Only DF is touched.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11416
11417
11418/**
11419 * @opcode 0xfe
11420 */
11421FNIEMOP_DEF(iemOp_Grp4)
11422{
11423 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11424 switch (IEM_GET_MODRM_REG_8(bRm))
11425 {
11426 case 0:
11427 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
11428 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
11429 case 1:
11430 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
11431 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
11432 default:
11433 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
11434 return IEMOP_RAISE_INVALID_OPCODE();
11435 }
11436}
11437
11438
/**
 * Opcode 0xff /2.
 *
 * CALL Ev - near indirect call.  The new IP/EIP/RIP is read from a register
 * or memory operand and the actual call (stack push etc.) is handled by
 * iemCImpl_call_16/32/64.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location.  (The original comment
           here wrongly said "register"; cf. the jmpn_Ev counterpart.) */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11523
/** C implementation callback type for far branches taking a selector, an
 *  offset and the effective operand size (used for callf/jmpf via Ep). */
typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);

/**
 * Common worker for opcode 0xff /3 (callf Ep) and /5 (jmpf Ep).
 *
 * Loads a far pointer (offset followed by a 16-bit selector) from memory and
 * hands it to the given C implementation.  Register operands are invalid
 * encodings and raise \#UD.
 *
 * @param   bRm         The ModR/M byte.
 * @param   pfnCImpl    The C implementation doing the branch
 *                      (iemCImpl_callf or iemCImpl_FarJmp).
 */
FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
{
    /* A far pointer cannot come from a register. */
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(bRm)))
    { /* likely */ }
    else
        return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */

    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */
    /** @todo what does VIA do? */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu))
    { /* likely */ }
    else
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT;

    /* Far pointer loaded from memory. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* The 16-bit offset comes first; the selector follows at +2. */
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* 32-bit offset first; selector at +4. */
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* Only reachable on Intel guests, see the REX.W handling above. */
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu));
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* 64-bit offset first; selector at +8. */
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11590
11591
/**
 * Opcode 0xff /3.
 *
 * CALLF Ep - far indirect call; thin wrapper that feeds iemCImpl_callf to the
 * common far-Ep worker.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
11601
11602
/**
 * Opcode 0xff /4.
 *
 * JMP Ev - near indirect jump.  The new IP/EIP/RIP is read from a register or
 * memory operand and assigned directly (no stack interaction, unlike /2).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11687
11688
/**
 * Opcode 0xff /5.
 *
 * JMPF Ep - far indirect jump; thin wrapper that feeds iemCImpl_FarJmp to the
 * common far-Ep worker.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
11698
11699
/**
 * Opcode 0xff /6.
 *
 * PUSH Ev - push a word/dword/qword register or memory operand.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11755
11756
/**
 * @opcode 0xff
 *
 * Group 5 dispatcher: INC/DEC Ev, near/far indirect CALL and JMP, and
 * PUSH Ev, selected by the reg field of the ModR/M byte.  /7 is invalid.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* Unreachable: the 3-bit reg field is fully covered above. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
11787
11788
11789
/**
 * The one-byte opcode dispatch table.
 *
 * Indexed by the opcode byte (0x00..0xff).  Prefix bytes (segment overrides,
 * 0x66/0x67 operand/address size, 0xf0 lock, 0xf2/0xf3 rep) have entries of
 * their own, as does the 0x0f two-byte escape.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
11857
11858
11859/** @} */
11860
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette