VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 96933

Last change on this file since 96933 was 96933, checked in by vboxsync, 2 years ago

Improved comments.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 395.4 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 96933 2022-09-29 18:26:09Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/* Instruction specification format - work in progress: */
60
61/**
62 * @opcode 0x00
63 * @opmnemonic add
64 * @op1 rm:Eb
65 * @op2 reg:Gb
66 * @opmaps one
67 * @openc ModR/M
68 * @opflmodify cf,pf,af,zf,sf,of
69 * @ophints harmless ignores_op_sizes
70 * @opstats add_Eb_Gb
71 * @opgroup og_gen_arith_bin
72 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
73 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
74 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
75 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
76 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Common ModR/M "r/m8 op= r8" decode helper, dispatching to the ADD worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
82
83
84/**
85 * @opcode 0x01
86 * @opgroup og_gen_arith_bin
87 * @opflmodify cf,pf,af,zf,sf,of
88 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
89 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
90 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
91 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
92 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Common ModR/M "r/m(16|32|64) op= reg" decode helper with the ADD worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
98
99
100/**
101 * @opcode 0x02
102 * @opgroup og_gen_arith_bin
103 * @opflmodify cf,pf,af,zf,sf,of
104 * @opcopytests iemOp_add_Eb_Gb
105 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register-destination byte form: "r8 op= r/m8" helper with the ADD worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
111
112
113/**
114 * @opcode 0x03
115 * @opgroup og_gen_arith_bin
116 * @opflmodify cf,pf,af,zf,sf,of
117 * @opcopytests iemOp_add_Ev_Gv
118 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register-destination word/dword/qword form: "reg op= r/m" helper, ADD workers. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
124
125
126/**
127 * @opcode 0x04
128 * @opgroup og_gen_arith_bin
129 * @opflmodify cf,pf,af,zf,sf,of
130 * @opcopytests iemOp_add_Eb_Gb
131 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed-register form: AL op= imm8, via the shared AL,Ib helper. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
137
138
139/**
140 * @opcode 0x05
141 * @opgroup og_gen_arith_bin
142 * @opflmodify cf,pf,af,zf,sf,of
143 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
144 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
145 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
146 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
147 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Fixed-register form: rAX op= Iz (imm16/32, sign-extended for 64-bit), ADD workers. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
153
154
155/**
156 * @opcode 0x06
157 * @opgroup og_stack_sreg
158 */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();   /* opcode is #UD in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
165
166
167/**
168 * @opcode 0x07
169 * @opgroup og_stack_sreg
170 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();   /* opcode is #UD in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Segment loading has side effects (checks, hidden regs) -> deferred to C impl. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
178
179
180/**
181 * @opcode 0x08
182 * @opgroup og_gen_arith_bin
183 * @opflmodify cf,pf,af,zf,sf,of
184 * @opflundef af
185 * @opflclear of,cf
186 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
187 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
188 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
189 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
190 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after OR */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
197
198
/**
 * @opcode 0x09
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}
218
219
220/**
221 * @opcode 0x0a
222 * @opgroup og_gen_arith_bin
223 * @opflmodify cf,pf,af,zf,sf,of
224 * @opflundef af
225 * @opflclear of,cf
226 * @opcopytests iemOp_or_Eb_Gb
227 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after OR */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}
234
235
236/**
237 * @opcode 0x0b
238 * @opgroup og_gen_arith_bin
239 * @opflmodify cf,pf,af,zf,sf,of
240 * @opflundef af
241 * @opflclear of,cf
242 * @opcopytests iemOp_or_Ev_Gv
243 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after OR */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
250
251
252/**
253 * @opcode 0x0c
254 * @opgroup og_gen_arith_bin
255 * @opflmodify cf,pf,af,zf,sf,of
256 * @opflundef af
257 * @opflclear of,cf
258 * @opcopytests iemOp_or_Eb_Gb
259 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after OR */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
266
267
268/**
269 * @opcode 0x0d
270 * @opgroup og_gen_arith_bin
271 * @opflmodify cf,pf,af,zf,sf,of
272 * @opflundef af
273 * @opflclear of,cf
274 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
275 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
276 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
277 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
278 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
279 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
280 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
281 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after OR */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
288
289
290/**
291 * @opcode 0x0e
292 * @opgroup og_stack_sreg
293 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();   /* opcode is #UD in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
300
301
302/**
303 * @opcode 0x0f
304 * @opmnemonic EscTwo0f
305 * @openc two0f
306 * @opdisenum OP_2B_ESC
307 * @ophints harmless
308 * @opgroup og_escapes
309 */
310FNIEMOP_DEF(iemOp_2byteEscape)
311{
312#ifdef VBOX_STRICT
313 /* Sanity check the table the first time around. */
314 static bool s_fTested = false;
315 if (RT_LIKELY(s_fTested)) { /* likely */ }
316 else
317 {
318 s_fTested = true;
319 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
320 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
321 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
322 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
323 }
324#endif
325
326 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
327 {
328 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
329 IEMOP_HLP_MIN_286();
330 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
331 }
332 /* @opdone */
333
334 /*
335 * On the 8086 this is a POP CS instruction.
336 * For the time being we don't specify this this.
337 */
338 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
339 IEMOP_HLP_NO_64BIT();
340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
341 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
342}
343
344/**
345 * @opcode 0x10
346 * @opgroup og_gen_arith_bin
347 * @opfltest cf
348 * @opflmodify cf,pf,af,zf,sf,of
349 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
350 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
351 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
352 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
353 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
354 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Add-with-carry byte form; CF input handled by the ADC worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
360
361
362/**
363 * @opcode 0x11
364 * @opgroup og_gen_arith_bin
365 * @opfltest cf
366 * @opflmodify cf,pf,af,zf,sf,of
367 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
368 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
369 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
370 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
371 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
372 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Add-with-carry Ev,Gv form; CF input handled by the ADC worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
378
379
380/**
381 * @opcode 0x12
382 * @opgroup og_gen_arith_bin
383 * @opfltest cf
384 * @opflmodify cf,pf,af,zf,sf,of
385 * @opcopytests iemOp_adc_Eb_Gb
386 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register-destination byte form of ADC. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
392
393
394/**
395 * @opcode 0x13
396 * @opgroup og_gen_arith_bin
397 * @opfltest cf
398 * @opflmodify cf,pf,af,zf,sf,of
399 * @opcopytests iemOp_adc_Ev_Gv
400 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register-destination Gv,Ev form of ADC. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
406
407
408/**
409 * @opcode 0x14
410 * @opgroup og_gen_arith_bin
411 * @opfltest cf
412 * @opflmodify cf,pf,af,zf,sf,of
413 * @opcopytests iemOp_adc_Eb_Gb
414 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed-register form: AL += imm8 + CF. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
420
421
422/**
423 * @opcode 0x15
424 * @opgroup og_gen_arith_bin
425 * @opfltest cf
426 * @opflmodify cf,pf,af,zf,sf,of
427 * @opcopytests iemOp_adc_Ev_Gv
428 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Fixed-register form: rAX += Iz + CF. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
434
435
436/**
437 * @opcode 0x16
438 */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();   /* opcode is #UD in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
445
446
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
460
461
462/**
463 * @opcode 0x18
464 * @opgroup og_gen_arith_bin
465 * @opfltest cf
466 * @opflmodify cf,pf,af,zf,sf,of
467 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Subtract-with-borrow byte form; CF input handled by the SBB worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
473
474
475/**
476 * @opcode 0x19
477 * @opgroup og_gen_arith_bin
478 * @opfltest cf
479 * @opflmodify cf,pf,af,zf,sf,of
480 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Subtract-with-borrow Ev,Gv form. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
486
487
488/**
489 * @opcode 0x1a
490 * @opgroup og_gen_arith_bin
491 * @opfltest cf
492 * @opflmodify cf,pf,af,zf,sf,of
493 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register-destination byte form of SBB. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
499
500
501/**
502 * @opcode 0x1b
503 * @opgroup og_gen_arith_bin
504 * @opfltest cf
505 * @opflmodify cf,pf,af,zf,sf,of
506 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register-destination Gv,Ev form of SBB. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
512
513
514/**
515 * @opcode 0x1c
516 * @opgroup og_gen_arith_bin
517 * @opfltest cf
518 * @opflmodify cf,pf,af,zf,sf,of
519 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed-register form: AL -= imm8 + CF. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
525
526
527/**
528 * @opcode 0x1d
529 * @opgroup og_gen_arith_bin
530 * @opfltest cf
531 * @opflmodify cf,pf,af,zf,sf,of
532 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Fixed-register form: rAX -= Iz + CF. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
538
539
540/**
541 * @opcode 0x1e
542 * @opgroup og_stack_sreg
543 */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();   /* opcode is #UD in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
550
551
552/**
553 * @opcode 0x1f
554 * @opgroup og_stack_sreg
555 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();   /* opcode is #UD in 64-bit mode */
    /* Segment loading has side effects (checks, hidden regs) -> deferred to C impl. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
563
564
565/**
566 * @opcode 0x20
567 * @opgroup og_gen_arith_bin
568 * @opflmodify cf,pf,af,zf,sf,of
569 * @opflundef af
570 * @opflclear of,cf
571 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after AND */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
578
579
580/**
581 * @opcode 0x21
582 * @opgroup og_gen_arith_bin
583 * @opflmodify cf,pf,af,zf,sf,of
584 * @opflundef af
585 * @opflclear of,cf
586 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after AND */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
593
594
595/**
596 * @opcode 0x22
597 * @opgroup og_gen_arith_bin
598 * @opflmodify cf,pf,af,zf,sf,of
599 * @opflundef af
600 * @opflclear of,cf
601 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after AND */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
608
609
610/**
611 * @opcode 0x23
612 * @opgroup og_gen_arith_bin
613 * @opflmodify cf,pf,af,zf,sf,of
614 * @opflundef af
615 * @opflclear of,cf
616 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after AND */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
623
624
625/**
626 * @opcode 0x24
627 * @opgroup og_gen_arith_bin
628 * @opflmodify cf,pf,af,zf,sf,of
629 * @opflundef af
630 * @opflclear of,cf
631 */
632FNIEMOP_DEF(iemOp_and_Al_Ib)
633{
634 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
635 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
636 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
637}
638
639
640/**
641 * @opcode 0x25
642 * @opgroup og_gen_arith_bin
643 * @opflmodify cf,pf,af,zf,sf,of
644 * @opflundef af
645 * @opflclear of,cf
646 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after AND */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
653
654
655/**
656 * @opcode 0x26
657 * @opmnemonic SEG
658 * @op1 ES
659 * @opgroup og_prefix
660 * @openc prefix
661 * @opdisenum OP_SEG
662 * @ophints harmless
663 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it, then decode the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg   = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
673
674
675/**
676 * @opcode 0x27
677 * @opfltest af,cf
678 * @opflmodify cf,pf,af,zf,sf,of
679 * @opflundef of
680 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();   /* opcode is #UD in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);    /* OF is architecturally undefined after DAA */
    /* BCD adjustment logic lives in the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
689
690
691/**
692 * @opcode 0x28
693 * @opgroup og_gen_arith_bin
694 * @opflmodify cf,pf,af,zf,sf,of
695 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Byte subtract via the common "r/m8 op= r8" helper with the SUB worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
701
702
703/**
704 * @opcode 0x29
705 * @opgroup og_gen_arith_bin
706 * @opflmodify cf,pf,af,zf,sf,of
707 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Ev,Gv subtract via the common ModR/M helper with the SUB worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
713
714
715/**
716 * @opcode 0x2a
717 * @opgroup og_gen_arith_bin
718 * @opflmodify cf,pf,af,zf,sf,of
719 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register-destination byte form of SUB. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
725
726
727/**
728 * @opcode 0x2b
729 * @opgroup og_gen_arith_bin
730 * @opflmodify cf,pf,af,zf,sf,of
731 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register-destination Gv,Ev form of SUB. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
737
738
739/**
740 * @opcode 0x2c
741 * @opgroup og_gen_arith_bin
742 * @opflmodify cf,pf,af,zf,sf,of
743 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed-register form: AL -= imm8. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
749
750
751/**
752 * @opcode 0x2d
753 * @opgroup og_gen_arith_bin
754 * @opflmodify cf,pf,af,zf,sf,of
755 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Fixed-register form: rAX -= Iz. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
761
762
763/**
764 * @opcode 0x2e
765 * @opmnemonic SEG
766 * @op1 CS
767 * @opgroup og_prefix
768 * @openc prefix
769 * @opdisenum OP_SEG
770 * @ophints harmless
771 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it, then decode the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg   = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
781
782
783/**
784 * @opcode 0x2f
785 * @opfltest af,cf
786 * @opflmodify cf,pf,af,zf,sf,of
787 * @opflundef of
788 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();   /* opcode is #UD in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);    /* OF is architecturally undefined after DAS */
    /* BCD adjustment logic lives in the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
797
798
799/**
800 * @opcode 0x30
801 * @opgroup og_gen_arith_bin
802 * @opflmodify cf,pf,af,zf,sf,of
803 * @opflundef af
804 * @opflclear of,cf
805 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after XOR */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
812
813
814/**
815 * @opcode 0x31
816 * @opgroup og_gen_arith_bin
817 * @opflmodify cf,pf,af,zf,sf,of
818 * @opflundef af
819 * @opflclear of,cf
820 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after XOR */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
827
828
829/**
830 * @opcode 0x32
831 * @opgroup og_gen_arith_bin
832 * @opflmodify cf,pf,af,zf,sf,of
833 * @opflundef af
834 * @opflclear of,cf
835 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after XOR */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
842
843
844/**
845 * @opcode 0x33
846 * @opgroup og_gen_arith_bin
847 * @opflmodify cf,pf,af,zf,sf,of
848 * @opflundef af
849 * @opflclear of,cf
850 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after XOR */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
857
858
859/**
860 * @opcode 0x34
861 * @opgroup og_gen_arith_bin
862 * @opflmodify cf,pf,af,zf,sf,of
863 * @opflundef af
864 * @opflclear of,cf
865 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after XOR */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
872
873
874/**
875 * @opcode 0x35
876 * @opgroup og_gen_arith_bin
877 * @opflmodify cf,pf,af,zf,sf,of
878 * @opflundef af
879 * @opflclear of,cf
880 */
881FNIEMOP_DEF(iemOp_xor_eAX_Iz)
882{
883 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
884 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
885 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
886}
887
888
889/**
890 * @opcode 0x36
891 * @opmnemonic SEG
892 * @op1 SS
893 * @opgroup og_prefix
894 * @openc prefix
895 * @opdisenum OP_SEG
896 * @ophints harmless
897 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it, then decode the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg   = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
907
908
909/**
910 * @opcode 0x37
911 * @opfltest af,cf
912 * @opflmodify cf,pf,af,zf,sf,of
913 * @opflundef pf,zf,sf,of
914 * @opgroup og_gen_arith_dec
915 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
916 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
917 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
918 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
919 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
920 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
921 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
922 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
923 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
924 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
925 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
926 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
927 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
928 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
929 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
930 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
931 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
932 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
933 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
934 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
935 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
936 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
937 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
938 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
939 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
940 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
941 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
942 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
943 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
944 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
945 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
946 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();   /* opcode is #UD in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);    /* OF is among the architecturally undefined flags */

    /* ASCII adjust logic lives in the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
}
956
957
958/**
959 * @opcode 0x38
960 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    /* CMP: same decode path as SUB but the worker table only updates EFLAGS. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
966
967
968/**
969 * @opcode 0x39
970 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    /* CMP Ev,Gv via the common ModR/M helper with the CMP worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
976
977
978/**
979 * @opcode 0x3a
980 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    /* Register-first byte form of CMP. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
986
987
988/**
989 * @opcode 0x3b
990 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    /* Register-first Gv,Ev form of CMP. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
996
997
998/**
999 * @opcode 0x3c
1000 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    /* Fixed-register form: compare AL with imm8 (flags only). */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
1006
1007
1008/**
1009 * @opcode 0x3d
1010 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    /* Fixed-register form: compare rAX with Iz (flags only). */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
1016
1017
1018/**
1019 * @opcode 0x3e
1020 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it, then decode the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg   = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1030
1031
1032/**
1033 * @opcode 0x3f
1034 * @opfltest af,cf
1035 * @opflmodify cf,pf,af,zf,sf,of
1036 * @opflundef pf,zf,sf,of
1037 * @opgroup og_gen_arith_dec
1038 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1039 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1040 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1041 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1042 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1043 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1044 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1045 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1046 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1047 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1048 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1049 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1050 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1051 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1052 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1053 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1054 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1055 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1056 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1057 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1058 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1059 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1060 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1061 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1062 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1063 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1064 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1065 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1066 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1067 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1068 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1069 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1070 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1071 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1072 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1073 */
1074FNIEMOP_DEF(iemOp_aas)
1075{
1076 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
1077 IEMOP_HLP_NO_64BIT();
1078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1079 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1080
1081 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
1082}
1083
1084
1085/**
1086 * Common 'inc/dec/not/neg register' helper.
1087 */
1088FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
1089{
1090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1091 switch (pVCpu->iem.s.enmEffOpSize)
1092 {
1093 case IEMMODE_16BIT:
1094 IEM_MC_BEGIN(2, 0);
1095 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1096 IEM_MC_ARG(uint32_t *, pEFlags, 1);
1097 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
1098 IEM_MC_REF_EFLAGS(pEFlags);
1099 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
1100 IEM_MC_ADVANCE_RIP();
1101 IEM_MC_END();
1102 return VINF_SUCCESS;
1103
1104 case IEMMODE_32BIT:
1105 IEM_MC_BEGIN(2, 0);
1106 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
1107 IEM_MC_ARG(uint32_t *, pEFlags, 1);
1108 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
1109 IEM_MC_REF_EFLAGS(pEFlags);
1110 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
1111 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
1112 IEM_MC_ADVANCE_RIP();
1113 IEM_MC_END();
1114 return VINF_SUCCESS;
1115
1116 case IEMMODE_64BIT:
1117 IEM_MC_BEGIN(2, 0);
1118 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1119 IEM_MC_ARG(uint32_t *, pEFlags, 1);
1120 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
1121 IEM_MC_REF_EFLAGS(pEFlags);
1122 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
1123 IEM_MC_ADVANCE_RIP();
1124 IEM_MC_END();
1125 return VINF_SUCCESS;
1126 }
1127 return VINF_SUCCESS;
1128}
1129
1130
1131/**
1132 * @opcode 0x40
1133 */
1134FNIEMOP_DEF(iemOp_inc_eAX)
1135{
1136 /*
1137 * This is a REX prefix in 64-bit mode.
1138 */
1139 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1140 {
1141 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
1142 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
1143
1144 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1145 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1146 }
1147
1148 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
1149 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
1150}
1151
1152
1153/**
1154 * @opcode 0x41
1155 */
1156FNIEMOP_DEF(iemOp_inc_eCX)
1157{
1158 /*
1159 * This is a REX prefix in 64-bit mode.
1160 */
1161 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1162 {
1163 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
1164 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
1165 pVCpu->iem.s.uRexB = 1 << 3;
1166
1167 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1168 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1169 }
1170
1171 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
1172 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
1173}
1174
1175
1176/**
1177 * @opcode 0x42
1178 */
1179FNIEMOP_DEF(iemOp_inc_eDX)
1180{
1181 /*
1182 * This is a REX prefix in 64-bit mode.
1183 */
1184 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1185 {
1186 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
1187 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
1188 pVCpu->iem.s.uRexIndex = 1 << 3;
1189
1190 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1191 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1192 }
1193
1194 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
1195 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
1196}
1197
1198
1199
1200/**
1201 * @opcode 0x43
1202 */
1203FNIEMOP_DEF(iemOp_inc_eBX)
1204{
1205 /*
1206 * This is a REX prefix in 64-bit mode.
1207 */
1208 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1209 {
1210 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
1211 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1212 pVCpu->iem.s.uRexB = 1 << 3;
1213 pVCpu->iem.s.uRexIndex = 1 << 3;
1214
1215 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1216 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1217 }
1218
1219 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
1220 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
1221}
1222
1223
1224/**
1225 * @opcode 0x44
1226 */
1227FNIEMOP_DEF(iemOp_inc_eSP)
1228{
1229 /*
1230 * This is a REX prefix in 64-bit mode.
1231 */
1232 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1233 {
1234 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
1235 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
1236 pVCpu->iem.s.uRexReg = 1 << 3;
1237
1238 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1239 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1240 }
1241
1242 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
1243 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
1244}
1245
1246
1247/**
1248 * @opcode 0x45
1249 */
1250FNIEMOP_DEF(iemOp_inc_eBP)
1251{
1252 /*
1253 * This is a REX prefix in 64-bit mode.
1254 */
1255 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1256 {
1257 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
1258 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
1259 pVCpu->iem.s.uRexReg = 1 << 3;
1260 pVCpu->iem.s.uRexB = 1 << 3;
1261
1262 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1263 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1264 }
1265
1266 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
1267 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
1268}
1269
1270
1271/**
1272 * @opcode 0x46
1273 */
1274FNIEMOP_DEF(iemOp_inc_eSI)
1275{
1276 /*
1277 * This is a REX prefix in 64-bit mode.
1278 */
1279 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1280 {
1281 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
1282 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
1283 pVCpu->iem.s.uRexReg = 1 << 3;
1284 pVCpu->iem.s.uRexIndex = 1 << 3;
1285
1286 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1287 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1288 }
1289
1290 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
1291 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
1292}
1293
1294
1295/**
1296 * @opcode 0x47
1297 */
1298FNIEMOP_DEF(iemOp_inc_eDI)
1299{
1300 /*
1301 * This is a REX prefix in 64-bit mode.
1302 */
1303 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1304 {
1305 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
1306 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1307 pVCpu->iem.s.uRexReg = 1 << 3;
1308 pVCpu->iem.s.uRexB = 1 << 3;
1309 pVCpu->iem.s.uRexIndex = 1 << 3;
1310
1311 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1312 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1313 }
1314
1315 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
1316 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
1317}
1318
1319
1320/**
1321 * @opcode 0x48
1322 */
1323FNIEMOP_DEF(iemOp_dec_eAX)
1324{
1325 /*
1326 * This is a REX prefix in 64-bit mode.
1327 */
1328 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1329 {
1330 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1331 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
1332 iemRecalEffOpSize(pVCpu);
1333
1334 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1335 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1336 }
1337
1338 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
1339 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
1340}
1341
1342
1343/**
1344 * @opcode 0x49
1345 */
1346FNIEMOP_DEF(iemOp_dec_eCX)
1347{
1348 /*
1349 * This is a REX prefix in 64-bit mode.
1350 */
1351 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1352 {
1353 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
1354 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1355 pVCpu->iem.s.uRexB = 1 << 3;
1356 iemRecalEffOpSize(pVCpu);
1357
1358 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1359 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1360 }
1361
1362 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
1363 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
1364}
1365
1366
1367/**
1368 * @opcode 0x4a
1369 */
1370FNIEMOP_DEF(iemOp_dec_eDX)
1371{
1372 /*
1373 * This is a REX prefix in 64-bit mode.
1374 */
1375 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1376 {
1377 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
1378 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1379 pVCpu->iem.s.uRexIndex = 1 << 3;
1380 iemRecalEffOpSize(pVCpu);
1381
1382 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1383 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1384 }
1385
1386 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
1387 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
1388}
1389
1390
1391/**
1392 * @opcode 0x4b
1393 */
1394FNIEMOP_DEF(iemOp_dec_eBX)
1395{
1396 /*
1397 * This is a REX prefix in 64-bit mode.
1398 */
1399 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1400 {
1401 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
1402 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1403 pVCpu->iem.s.uRexB = 1 << 3;
1404 pVCpu->iem.s.uRexIndex = 1 << 3;
1405 iemRecalEffOpSize(pVCpu);
1406
1407 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1408 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1409 }
1410
1411 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
1412 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
1413}
1414
1415
1416/**
1417 * @opcode 0x4c
1418 */
1419FNIEMOP_DEF(iemOp_dec_eSP)
1420{
1421 /*
1422 * This is a REX prefix in 64-bit mode.
1423 */
1424 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1425 {
1426 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
1427 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
1428 pVCpu->iem.s.uRexReg = 1 << 3;
1429 iemRecalEffOpSize(pVCpu);
1430
1431 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1432 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1433 }
1434
1435 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
1436 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
1437}
1438
1439
1440/**
1441 * @opcode 0x4d
1442 */
1443FNIEMOP_DEF(iemOp_dec_eBP)
1444{
1445 /*
1446 * This is a REX prefix in 64-bit mode.
1447 */
1448 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1449 {
1450 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
1451 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1452 pVCpu->iem.s.uRexReg = 1 << 3;
1453 pVCpu->iem.s.uRexB = 1 << 3;
1454 iemRecalEffOpSize(pVCpu);
1455
1456 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1457 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1458 }
1459
1460 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
1461 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
1462}
1463
1464
1465/**
1466 * @opcode 0x4e
1467 */
1468FNIEMOP_DEF(iemOp_dec_eSI)
1469{
1470 /*
1471 * This is a REX prefix in 64-bit mode.
1472 */
1473 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1474 {
1475 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
1476 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1477 pVCpu->iem.s.uRexReg = 1 << 3;
1478 pVCpu->iem.s.uRexIndex = 1 << 3;
1479 iemRecalEffOpSize(pVCpu);
1480
1481 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1482 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1483 }
1484
1485 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
1486 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
1487}
1488
1489
1490/**
1491 * @opcode 0x4f
1492 */
1493FNIEMOP_DEF(iemOp_dec_eDI)
1494{
1495 /*
1496 * This is a REX prefix in 64-bit mode.
1497 */
1498 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1499 {
1500 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
1501 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1502 pVCpu->iem.s.uRexReg = 1 << 3;
1503 pVCpu->iem.s.uRexB = 1 << 3;
1504 pVCpu->iem.s.uRexIndex = 1 << 3;
1505 iemRecalEffOpSize(pVCpu);
1506
1507 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1508 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1509 }
1510
1511 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
1512 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
1513}
1514
1515
1516/**
1517 * Common 'push register' helper.
1518 */
1519FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
1520{
1521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1522 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1523 {
1524 iReg |= pVCpu->iem.s.uRexB;
1525 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1526 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1527 }
1528
1529 switch (pVCpu->iem.s.enmEffOpSize)
1530 {
1531 case IEMMODE_16BIT:
1532 IEM_MC_BEGIN(0, 1);
1533 IEM_MC_LOCAL(uint16_t, u16Value);
1534 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
1535 IEM_MC_PUSH_U16(u16Value);
1536 IEM_MC_ADVANCE_RIP();
1537 IEM_MC_END();
1538 break;
1539
1540 case IEMMODE_32BIT:
1541 IEM_MC_BEGIN(0, 1);
1542 IEM_MC_LOCAL(uint32_t, u32Value);
1543 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
1544 IEM_MC_PUSH_U32(u32Value);
1545 IEM_MC_ADVANCE_RIP();
1546 IEM_MC_END();
1547 break;
1548
1549 case IEMMODE_64BIT:
1550 IEM_MC_BEGIN(0, 1);
1551 IEM_MC_LOCAL(uint64_t, u64Value);
1552 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
1553 IEM_MC_PUSH_U64(u64Value);
1554 IEM_MC_ADVANCE_RIP();
1555 IEM_MC_END();
1556 break;
1557 }
1558
1559 return VINF_SUCCESS;
1560}
1561
1562
1563/**
1564 * @opcode 0x50
1565 */
1566FNIEMOP_DEF(iemOp_push_eAX)
1567{
1568 IEMOP_MNEMONIC(push_rAX, "push rAX");
1569 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
1570}
1571
1572
1573/**
1574 * @opcode 0x51
1575 */
1576FNIEMOP_DEF(iemOp_push_eCX)
1577{
1578 IEMOP_MNEMONIC(push_rCX, "push rCX");
1579 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
1580}
1581
1582
1583/**
1584 * @opcode 0x52
1585 */
1586FNIEMOP_DEF(iemOp_push_eDX)
1587{
1588 IEMOP_MNEMONIC(push_rDX, "push rDX");
1589 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
1590}
1591
1592
1593/**
1594 * @opcode 0x53
1595 */
1596FNIEMOP_DEF(iemOp_push_eBX)
1597{
1598 IEMOP_MNEMONIC(push_rBX, "push rBX");
1599 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
1600}
1601
1602
1603/**
1604 * @opcode 0x54
1605 */
1606FNIEMOP_DEF(iemOp_push_eSP)
1607{
1608 IEMOP_MNEMONIC(push_rSP, "push rSP");
1609 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
1610 {
1611 IEM_MC_BEGIN(0, 1);
1612 IEM_MC_LOCAL(uint16_t, u16Value);
1613 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
1614 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
1615 IEM_MC_PUSH_U16(u16Value);
1616 IEM_MC_ADVANCE_RIP();
1617 IEM_MC_END();
1618 }
1619 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
1620}
1621
1622
1623/**
1624 * @opcode 0x55
1625 */
1626FNIEMOP_DEF(iemOp_push_eBP)
1627{
1628 IEMOP_MNEMONIC(push_rBP, "push rBP");
1629 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
1630}
1631
1632
1633/**
1634 * @opcode 0x56
1635 */
1636FNIEMOP_DEF(iemOp_push_eSI)
1637{
1638 IEMOP_MNEMONIC(push_rSI, "push rSI");
1639 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
1640}
1641
1642
1643/**
1644 * @opcode 0x57
1645 */
1646FNIEMOP_DEF(iemOp_push_eDI)
1647{
1648 IEMOP_MNEMONIC(push_rDI, "push rDI");
1649 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
1650}
1651
1652
1653/**
1654 * Common 'pop register' helper.
1655 */
1656FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
1657{
1658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1659 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1660 {
1661 iReg |= pVCpu->iem.s.uRexB;
1662 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1663 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1664 }
1665
1666 switch (pVCpu->iem.s.enmEffOpSize)
1667 {
1668 case IEMMODE_16BIT:
1669 IEM_MC_BEGIN(0, 1);
1670 IEM_MC_LOCAL(uint16_t *, pu16Dst);
1671 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
1672 IEM_MC_POP_U16(pu16Dst);
1673 IEM_MC_ADVANCE_RIP();
1674 IEM_MC_END();
1675 break;
1676
1677 case IEMMODE_32BIT:
1678 IEM_MC_BEGIN(0, 1);
1679 IEM_MC_LOCAL(uint32_t *, pu32Dst);
1680 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
1681 IEM_MC_POP_U32(pu32Dst);
1682 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
1683 IEM_MC_ADVANCE_RIP();
1684 IEM_MC_END();
1685 break;
1686
1687 case IEMMODE_64BIT:
1688 IEM_MC_BEGIN(0, 1);
1689 IEM_MC_LOCAL(uint64_t *, pu64Dst);
1690 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
1691 IEM_MC_POP_U64(pu64Dst);
1692 IEM_MC_ADVANCE_RIP();
1693 IEM_MC_END();
1694 break;
1695 }
1696
1697 return VINF_SUCCESS;
1698}
1699
1700
1701/**
1702 * @opcode 0x58
1703 */
1704FNIEMOP_DEF(iemOp_pop_eAX)
1705{
1706 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
1707 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
1708}
1709
1710
1711/**
1712 * @opcode 0x59
1713 */
1714FNIEMOP_DEF(iemOp_pop_eCX)
1715{
1716 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
1717 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
1718}
1719
1720
1721/**
1722 * @opcode 0x5a
1723 */
1724FNIEMOP_DEF(iemOp_pop_eDX)
1725{
1726 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
1727 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
1728}
1729
1730
1731/**
1732 * @opcode 0x5b
1733 */
1734FNIEMOP_DEF(iemOp_pop_eBX)
1735{
1736 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
1737 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
1738}
1739
1740
1741/**
1742 * @opcode 0x5c
1743 */
1744FNIEMOP_DEF(iemOp_pop_eSP)
1745{
1746 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
1747 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1748 {
1749 if (pVCpu->iem.s.uRexB)
1750 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
1751 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1752 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1753 }
1754
1755 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
1756 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
1757 /** @todo add testcase for this instruction. */
1758 switch (pVCpu->iem.s.enmEffOpSize)
1759 {
1760 case IEMMODE_16BIT:
1761 IEM_MC_BEGIN(0, 1);
1762 IEM_MC_LOCAL(uint16_t, u16Dst);
1763 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
1764 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
1765 IEM_MC_ADVANCE_RIP();
1766 IEM_MC_END();
1767 break;
1768
1769 case IEMMODE_32BIT:
1770 IEM_MC_BEGIN(0, 1);
1771 IEM_MC_LOCAL(uint32_t, u32Dst);
1772 IEM_MC_POP_U32(&u32Dst);
1773 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
1774 IEM_MC_ADVANCE_RIP();
1775 IEM_MC_END();
1776 break;
1777
1778 case IEMMODE_64BIT:
1779 IEM_MC_BEGIN(0, 1);
1780 IEM_MC_LOCAL(uint64_t, u64Dst);
1781 IEM_MC_POP_U64(&u64Dst);
1782 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
1783 IEM_MC_ADVANCE_RIP();
1784 IEM_MC_END();
1785 break;
1786 }
1787
1788 return VINF_SUCCESS;
1789}
1790
1791
1792/**
1793 * @opcode 0x5d
1794 */
1795FNIEMOP_DEF(iemOp_pop_eBP)
1796{
1797 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
1798 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
1799}
1800
1801
1802/**
1803 * @opcode 0x5e
1804 */
1805FNIEMOP_DEF(iemOp_pop_eSI)
1806{
1807 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
1808 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
1809}
1810
1811
1812/**
1813 * @opcode 0x5f
1814 */
1815FNIEMOP_DEF(iemOp_pop_eDI)
1816{
1817 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
1818 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
1819}
1820
1821
1822/**
1823 * @opcode 0x60
1824 */
1825FNIEMOP_DEF(iemOp_pusha)
1826{
1827 IEMOP_MNEMONIC(pusha, "pusha");
1828 IEMOP_HLP_MIN_186();
1829 IEMOP_HLP_NO_64BIT();
1830 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
1831 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
1832 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
1833 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
1834}
1835
1836
1837/**
1838 * @opcode 0x61
1839 */
1840FNIEMOP_DEF(iemOp_popa__mvex)
1841{
1842 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
1843 {
1844 IEMOP_MNEMONIC(popa, "popa");
1845 IEMOP_HLP_MIN_186();
1846 IEMOP_HLP_NO_64BIT();
1847 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
1848 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
1849 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
1850 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
1851 }
1852 IEMOP_MNEMONIC(mvex, "mvex");
1853 Log(("mvex prefix is not supported!\n"));
1854 return IEMOP_RAISE_INVALID_OPCODE();
1855}
1856
1857
1858/**
1859 * @opcode 0x62
1860 * @opmnemonic bound
1861 * @op1 Gv_RO
1862 * @op2 Ma
1863 * @opmincpu 80186
1864 * @ophints harmless invalid_64
1865 * @optest op1=0 op2=0 ->
1866 * @optest op1=1 op2=0 -> value.xcpt=5
1867 * @optest o16 / op1=0xffff op2=0x0000fffe ->
1868 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
1869 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
1870 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
1871 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
1872 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
1873 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
1874 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
1875 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
1876 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
1877 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
1878 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
1879 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
1880 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
1881 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
1882 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
1883 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
1884 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
1885 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
1886 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
1887 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
1888 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
1889 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
1890 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
1891 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
1892 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
1893 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
1894 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
1895 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
1896 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
1897 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
1898 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
1899 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
1900 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
1901 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
1902 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
1903 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
1904 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
1905 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
1906 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
1907 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
1908 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
1909 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             * whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Index from the register operand, bounds pair from memory
                   (lower at disp+0, upper at disp+2). */
                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Index from the register operand, bounds pair from memory
                   (lower at disp+0, upper at disp+4). */
                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD=3 outside 64-bit mode: only meaningful as an EVEX prefix. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         * does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX decoding stub: consume the remaining two payload bytes and bail. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1997
1998
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw: adjusts the RPL of the destination selector and sets ZF;
 * invalid in real and V86 modes, requires at least an 80286.  In 64-bit
 * mode this opcode is MOVSXD instead (see iemOp_movsxd_Gv_Ev). */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination. */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: mapped read-write, committed after the worker. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
2048
2049
2050/**
2051 * @opcode 0x63
2052 *
2053 * @note This is a weird one. It works like a regular move instruction if
2054 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2055 * @todo This definitely needs a testcase to verify the odd cases. */
2056FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2057{
2058 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */
2059
2060 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2061 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2062
2063 if (IEM_IS_MODRM_REG_MODE(bRm))
2064 {
2065 /*
2066 * Register to register.
2067 */
2068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2069 IEM_MC_BEGIN(0, 1);
2070 IEM_MC_LOCAL(uint64_t, u64Value);
2071 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
2072 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2073 IEM_MC_ADVANCE_RIP();
2074 IEM_MC_END();
2075 }
2076 else
2077 {
2078 /*
2079 * We're loading a register from memory.
2080 */
2081 IEM_MC_BEGIN(0, 2);
2082 IEM_MC_LOCAL(uint64_t, u64Value);
2083 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2084 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2086 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2087 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2088 IEM_MC_ADVANCE_RIP();
2089 IEM_MC_END();
2090 }
2091 return VINF_SUCCESS;
2092}
2093
2094
2095/**
2096 * @opcode 0x64
2097 * @opmnemonic segfs
2098 * @opmincpu 80386
2099 * @opgroup og_prefixes
2100 */
2101FNIEMOP_DEF(iemOp_seg_FS)
2102{
2103 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2104 IEMOP_HLP_MIN_386();
2105
2106 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2107 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2108
2109 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2110 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2111}
2112
2113
/**
 * @opcode 0x65
 * GS segment-override prefix: record it, then decode the next opcode byte.
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    /* Continue decoding with the prefix state applied. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2131
2132
/**
 * @opcode 0x66
 * Operand-size override prefix: flip the effective operand size and continue.
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2157
2158
/**
 * @opcode 0x67
 * Address-size override prefix: flip the effective address mode and continue.
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    /* The override toggles between the two sizes valid for the default mode;
       in 64-bit mode it selects 32-bit addressing (16-bit is not reachable). */
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2184
2185
/**
 * @opcode 0x68
 * PUSH Iz - push an immediate of operand size onto the stack.
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit pushes take a 32-bit immediate, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2232
2233
/**
 * @opcode 0x69
 * IMUL Gv,Ev,Iz - three-operand signed multiply with a full-size immediate.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF/ZF/AF/PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Work on a local copy; the result is stored to Gv afterwards. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 2 = bytes of immediate following the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 = bytes of immediate following the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                /* 64-bit form takes a 32-bit immediate, sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 = bytes of immediate following the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
2401
2402
/**
 * @opcode 0x6a
 * PUSH Ib - push a sign-extended byte immediate onto the stack.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    /* Passing the int8_t directly relies on C integral promotion to get the
       architectural sign extension to the effective operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2431
2432
/**
 * @opcode 0x6b
 * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended byte immediate.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF/ZF/AF/PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* The (int8_t) cast provides the architectural sign extension. */
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Work on a local copy; the result is stored to Gv afterwards. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 1 = bytes of immediate following the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
2594
2595
/**
 * @opcode 0x6c
 * INS/INSB - input byte(s) from port DX to ES:[rDI]; deferred to C helpers.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REP and REPNE are treated identically here (REPNE is undefined for INS). */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2626
2627
/**
 * @opcode 0x6d
 * INS/INSW/INSD - input word/dword(s) from port DX to ES:[rDI]; deferred to C helpers.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REP and REPNE are treated identically here (REPNE is undefined for INS). */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            /* A 64-bit operand size acts as 32-bit for INS (no 64-bit port I/O). */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2690
2691
/**
 * @opcode 0x6e
 * OUTS/OUTSB - output byte(s) from DS:[rSI] (seg overridable) to port DX; deferred to C helpers.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REP and REPNE are treated identically here (REPNE is undefined for OUTS). */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2722
2723
/**
 * @opcode 0x6f
 * OUTS/OUTSW/OUTSD - output word/dword(s) from DS:[rSI] (seg overridable) to port DX;
 * deferred to C helpers.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REP and REPNE are treated identically here (REPNE is undefined for OUTS). */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            /* A 64-bit operand size acts as 32-bit for OUTS (no 64-bit port I/O). */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2786
2787
/**
 * @opcode 0x70
 * JO rel8 - jump short if overflow (OF=1).
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2807
2808
/**
 * @opcode 0x71
 * JNO rel8 - jump short if not overflow (OF=0).
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: tests OF set and falls through, jumping in the else leg. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2828
/**
 * @opcode 0x72
 * JC/JB/JNAE rel8 - jump short if carry (CF=1).
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2848
2849
/**
 * @opcode 0x73
 * JNC/JNB/JAE rel8 - jump short if not carry (CF=0).
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: tests CF set and falls through, jumping in the else leg. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2869
2870
/**
 * @opcode 0x74
 * JE/JZ rel8 - jump short if equal/zero (ZF=1).
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2890
2891
/**
 * @opcode 0x75
 * JNE/JNZ rel8 - jump short if not equal/not zero (ZF=0).
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: tests ZF set and falls through, jumping in the else leg. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2911
2912
/**
 * @opcode 0x76
 * JBE/JNA rel8 - jump short if below or equal (CF=1 or ZF=1).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2932
2933
/**
 * @opcode 0x77
 * JA/JNBE rel8 - jump short if above (CF=0 and ZF=0).
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: tests CF|ZF set and falls through, jumping in the else leg. */
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2953
2954
/**
 * @opcode 0x78
 * JS rel8 - jump short if sign (SF=1).
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2974
2975
/**
 * @opcode 0x79
 * JNS rel8 - jump short if not sign (SF=0).
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: tests SF set and falls through, jumping in the else leg. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2995
2996
/**
 * @opcode 0x7a
 * JP/JPE rel8 - jump short if parity even (PF=1).
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3016
3017
/**
 * @opcode 0x7b
 * JNP/JPO rel8 - jump short if parity odd (PF=0).
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: tests PF set and falls through, jumping in the else leg. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3037
3038
/**
 * @opcode 0x7c
 * JL/JNGE rel8 - jump short if less (SF != OF).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3058
3059
/**
 * @opcode 0x7d
 * JNL/JGE rel8 - jump short if greater or equal (SF == OF).
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: tests SF != OF and falls through, jumping in the else leg. */
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3079
3080
/**
 * @opcode 0x7e
 * JLE/JNG rel8 - jump short if less or equal (ZF=1 or SF != OF).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3100
3101
/**
 * @opcode 0x7f
 * JG/JNLE rel8 - jump short if greater (ZF=0 and SF == OF).
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: tests ZF=1 or SF != OF and falls through, jumping in the else leg. */
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3121
3122
/**
 * @opcode 0x80
 * Group 1 byte ops with byte immediate: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib.
 * The ModR/M reg field selects the operation via g_apIemImplGrp1.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* A missing locked variant marks a read-only op (CMP), which also
           makes the LOCK prefix invalid for it. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* 1 = bytes of immediate following the ModR/M encoding. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3194
3195
3196/**
3197 * @opcode 0x81
3198 */
3199FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
3200{
3201 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3202 switch (IEM_GET_MODRM_REG_8(bRm))
3203 {
3204 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
3205 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
3206 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
3207 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
3208 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
3209 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
3210 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
3211 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
3212 }
3213 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];
3214
3215 switch (pVCpu->iem.s.enmEffOpSize)
3216 {
3217 case IEMMODE_16BIT:
3218 {
3219 if (IEM_IS_MODRM_REG_MODE(bRm))
3220 {
3221 /* register target */
3222 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3224 IEM_MC_BEGIN(3, 0);
3225 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3226 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
3227 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3228
3229 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3230 IEM_MC_REF_EFLAGS(pEFlags);
3231 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3232
3233 IEM_MC_ADVANCE_RIP();
3234 IEM_MC_END();
3235 }
3236 else
3237 {
3238 /* memory target */
3239 uint32_t fAccess;
3240 if (pImpl->pfnLockedU16)
3241 fAccess = IEM_ACCESS_DATA_RW;
3242 else /* CMP, TEST */
3243 fAccess = IEM_ACCESS_DATA_R;
3244 IEM_MC_BEGIN(3, 2);
3245 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3246 IEM_MC_ARG(uint16_t, u16Src, 1);
3247 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3248 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3249
3250 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
3251 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3252 IEM_MC_ASSIGN(u16Src, u16Imm);
3253 if (pImpl->pfnLockedU16)
3254 IEMOP_HLP_DONE_DECODING();
3255 else
3256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3257 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3258 IEM_MC_FETCH_EFLAGS(EFlags);
3259 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3260 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3261 else
3262 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3263
3264 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3265 IEM_MC_COMMIT_EFLAGS(EFlags);
3266 IEM_MC_ADVANCE_RIP();
3267 IEM_MC_END();
3268 }
3269 break;
3270 }
3271
3272 case IEMMODE_32BIT:
3273 {
3274 if (IEM_IS_MODRM_REG_MODE(bRm))
3275 {
3276 /* register target */
3277 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3279 IEM_MC_BEGIN(3, 0);
3280 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3281 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
3282 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3283
3284 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3285 IEM_MC_REF_EFLAGS(pEFlags);
3286 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3287 if (pImpl != &g_iemAImpl_cmp) /* TEST won't get here, no need to check for it. */
3288 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3289
3290 IEM_MC_ADVANCE_RIP();
3291 IEM_MC_END();
3292 }
3293 else
3294 {
3295 /* memory target */
3296 uint32_t fAccess;
3297 if (pImpl->pfnLockedU32)
3298 fAccess = IEM_ACCESS_DATA_RW;
3299 else /* CMP, TEST */
3300 fAccess = IEM_ACCESS_DATA_R;
3301 IEM_MC_BEGIN(3, 2);
3302 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3303 IEM_MC_ARG(uint32_t, u32Src, 1);
3304 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3305 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3306
3307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3308 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3309 IEM_MC_ASSIGN(u32Src, u32Imm);
3310 if (pImpl->pfnLockedU32)
3311 IEMOP_HLP_DONE_DECODING();
3312 else
3313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3314 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3315 IEM_MC_FETCH_EFLAGS(EFlags);
3316 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3317 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3318 else
3319 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3320
3321 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3322 IEM_MC_COMMIT_EFLAGS(EFlags);
3323 IEM_MC_ADVANCE_RIP();
3324 IEM_MC_END();
3325 }
3326 break;
3327 }
3328
3329 case IEMMODE_64BIT:
3330 {
3331 if (IEM_IS_MODRM_REG_MODE(bRm))
3332 {
3333 /* register target */
3334 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3336 IEM_MC_BEGIN(3, 0);
3337 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3338 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
3339 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3340
3341 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3342 IEM_MC_REF_EFLAGS(pEFlags);
3343 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3344
3345 IEM_MC_ADVANCE_RIP();
3346 IEM_MC_END();
3347 }
3348 else
3349 {
3350 /* memory target */
3351 uint32_t fAccess;
3352 if (pImpl->pfnLockedU64)
3353 fAccess = IEM_ACCESS_DATA_RW;
3354 else /* CMP */
3355 fAccess = IEM_ACCESS_DATA_R;
3356 IEM_MC_BEGIN(3, 2);
3357 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3358 IEM_MC_ARG(uint64_t, u64Src, 1);
3359 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3360 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3361
3362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3363 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3364 if (pImpl->pfnLockedU64)
3365 IEMOP_HLP_DONE_DECODING();
3366 else
3367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3368 IEM_MC_ASSIGN(u64Src, u64Imm);
3369 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3370 IEM_MC_FETCH_EFLAGS(EFlags);
3371 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3372 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3373 else
3374 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3375
3376 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3377 IEM_MC_COMMIT_EFLAGS(EFlags);
3378 IEM_MC_ADVANCE_RIP();
3379 IEM_MC_END();
3380 }
3381 break;
3382 }
3383 }
3384 return VINF_SUCCESS;
3385}
3386
3387
3388/**
3389 * @opcode 0x82
3390 * @opmnemonic grp1_82
3391 * @opgroup og_groups
3392 */
3393FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
3394{
3395 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
3396 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
3397}
3398
3399
3400/**
3401 * @opcode 0x83
3402 */
3403FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
3404{
3405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3406 switch (IEM_GET_MODRM_REG_8(bRm))
3407 {
3408 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
3409 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
3410 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
3411 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
3412 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
3413 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
3414 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
3415 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
3416 }
3417 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
3418 to the 386 even if absent in the intel reference manuals and some
3419 3rd party opcode listings. */
3420 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];
3421
3422 if (IEM_IS_MODRM_REG_MODE(bRm))
3423 {
3424 /*
3425 * Register target
3426 */
3427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3428 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3429 switch (pVCpu->iem.s.enmEffOpSize)
3430 {
3431 case IEMMODE_16BIT:
3432 {
3433 IEM_MC_BEGIN(3, 0);
3434 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3435 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
3436 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3437
3438 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3439 IEM_MC_REF_EFLAGS(pEFlags);
3440 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3441
3442 IEM_MC_ADVANCE_RIP();
3443 IEM_MC_END();
3444 break;
3445 }
3446
3447 case IEMMODE_32BIT:
3448 {
3449 IEM_MC_BEGIN(3, 0);
3450 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3451 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
3452 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3453
3454 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3455 IEM_MC_REF_EFLAGS(pEFlags);
3456 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3457 if (pImpl != &g_iemAImpl_cmp) /* TEST won't get here, no need to check for it. */
3458 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3459
3460 IEM_MC_ADVANCE_RIP();
3461 IEM_MC_END();
3462 break;
3463 }
3464
3465 case IEMMODE_64BIT:
3466 {
3467 IEM_MC_BEGIN(3, 0);
3468 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3469 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
3470 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3471
3472 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3473 IEM_MC_REF_EFLAGS(pEFlags);
3474 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3475
3476 IEM_MC_ADVANCE_RIP();
3477 IEM_MC_END();
3478 break;
3479 }
3480 }
3481 }
3482 else
3483 {
3484 /*
3485 * Memory target.
3486 */
3487 uint32_t fAccess;
3488 if (pImpl->pfnLockedU16)
3489 fAccess = IEM_ACCESS_DATA_RW;
3490 else /* CMP */
3491 fAccess = IEM_ACCESS_DATA_R;
3492
3493 switch (pVCpu->iem.s.enmEffOpSize)
3494 {
3495 case IEMMODE_16BIT:
3496 {
3497 IEM_MC_BEGIN(3, 2);
3498 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3499 IEM_MC_ARG(uint16_t, u16Src, 1);
3500 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3501 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3502
3503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3504 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3505 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
3506 if (pImpl->pfnLockedU16)
3507 IEMOP_HLP_DONE_DECODING();
3508 else
3509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3510 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3511 IEM_MC_FETCH_EFLAGS(EFlags);
3512 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3513 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3514 else
3515 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3516
3517 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3518 IEM_MC_COMMIT_EFLAGS(EFlags);
3519 IEM_MC_ADVANCE_RIP();
3520 IEM_MC_END();
3521 break;
3522 }
3523
3524 case IEMMODE_32BIT:
3525 {
3526 IEM_MC_BEGIN(3, 2);
3527 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3528 IEM_MC_ARG(uint32_t, u32Src, 1);
3529 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3531
3532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3533 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3534 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
3535 if (pImpl->pfnLockedU32)
3536 IEMOP_HLP_DONE_DECODING();
3537 else
3538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3539 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3540 IEM_MC_FETCH_EFLAGS(EFlags);
3541 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3542 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3543 else
3544 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3545
3546 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3547 IEM_MC_COMMIT_EFLAGS(EFlags);
3548 IEM_MC_ADVANCE_RIP();
3549 IEM_MC_END();
3550 break;
3551 }
3552
3553 case IEMMODE_64BIT:
3554 {
3555 IEM_MC_BEGIN(3, 2);
3556 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3557 IEM_MC_ARG(uint64_t, u64Src, 1);
3558 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3560
3561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3562 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3563 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
3564 if (pImpl->pfnLockedU64)
3565 IEMOP_HLP_DONE_DECODING();
3566 else
3567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3568 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3569 IEM_MC_FETCH_EFLAGS(EFlags);
3570 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3571 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3572 else
3573 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3574
3575 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3576 IEM_MC_COMMIT_EFLAGS(EFlags);
3577 IEM_MC_ADVANCE_RIP();
3578 IEM_MC_END();
3579 break;
3580 }
3581 }
3582 }
3583 return VINF_SUCCESS;
3584}
3585
3586
3587/**
3588 * @opcode 0x84
3589 */
3590FNIEMOP_DEF(iemOp_test_Eb_Gb)
3591{
3592 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
3593 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
3594 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
3595}
3596
3597
3598/**
3599 * @opcode 0x85
3600 */
3601FNIEMOP_DEF(iemOp_test_Ev_Gv)
3602{
3603 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
3604 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
3605 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
3606}
3607
3608
3609/**
3610 * @opcode 0x86
3611 */
3612FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
3613{
3614 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3615 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
3616
3617 /*
3618 * If rm is denoting a register, no more instruction bytes.
3619 */
3620 if (IEM_IS_MODRM_REG_MODE(bRm))
3621 {
3622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3623
3624 IEM_MC_BEGIN(0, 2);
3625 IEM_MC_LOCAL(uint8_t, uTmp1);
3626 IEM_MC_LOCAL(uint8_t, uTmp2);
3627
3628 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3629 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3630 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3631 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3632
3633 IEM_MC_ADVANCE_RIP();
3634 IEM_MC_END();
3635 }
3636 else
3637 {
3638 /*
3639 * We're accessing memory.
3640 */
3641/** @todo the register must be committed separately! */
3642 IEM_MC_BEGIN(2, 2);
3643 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
3644 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
3645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3646
3647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3648 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3649 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
3650 if (!pVCpu->iem.s.fDisregardLock)
3651 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
3652 else
3653 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
3654 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
3655
3656 IEM_MC_ADVANCE_RIP();
3657 IEM_MC_END();
3658 }
3659 return VINF_SUCCESS;
3660}
3661
3662
3663/**
3664 * @opcode 0x87
3665 */
3666FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
3667{
3668 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
3669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3670
3671 /*
3672 * If rm is denoting a register, no more instruction bytes.
3673 */
3674 if (IEM_IS_MODRM_REG_MODE(bRm))
3675 {
3676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3677
3678 switch (pVCpu->iem.s.enmEffOpSize)
3679 {
3680 case IEMMODE_16BIT:
3681 IEM_MC_BEGIN(0, 2);
3682 IEM_MC_LOCAL(uint16_t, uTmp1);
3683 IEM_MC_LOCAL(uint16_t, uTmp2);
3684
3685 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3686 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3687 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3688 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3689
3690 IEM_MC_ADVANCE_RIP();
3691 IEM_MC_END();
3692 return VINF_SUCCESS;
3693
3694 case IEMMODE_32BIT:
3695 IEM_MC_BEGIN(0, 2);
3696 IEM_MC_LOCAL(uint32_t, uTmp1);
3697 IEM_MC_LOCAL(uint32_t, uTmp2);
3698
3699 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3700 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3701 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3702 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3703
3704 IEM_MC_ADVANCE_RIP();
3705 IEM_MC_END();
3706 return VINF_SUCCESS;
3707
3708 case IEMMODE_64BIT:
3709 IEM_MC_BEGIN(0, 2);
3710 IEM_MC_LOCAL(uint64_t, uTmp1);
3711 IEM_MC_LOCAL(uint64_t, uTmp2);
3712
3713 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3714 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3715 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3716 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3717
3718 IEM_MC_ADVANCE_RIP();
3719 IEM_MC_END();
3720 return VINF_SUCCESS;
3721
3722 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3723 }
3724 }
3725 else
3726 {
3727 /*
3728 * We're accessing memory.
3729 */
3730 switch (pVCpu->iem.s.enmEffOpSize)
3731 {
3732/** @todo the register must be committed separately! */
3733 case IEMMODE_16BIT:
3734 IEM_MC_BEGIN(2, 2);
3735 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
3736 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
3737 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3738
3739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3740 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3741 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
3742 if (!pVCpu->iem.s.fDisregardLock)
3743 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
3744 else
3745 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
3746 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
3747
3748 IEM_MC_ADVANCE_RIP();
3749 IEM_MC_END();
3750 return VINF_SUCCESS;
3751
3752 case IEMMODE_32BIT:
3753 IEM_MC_BEGIN(2, 2);
3754 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
3755 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
3756 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3757
3758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3759 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3760 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
3761 if (!pVCpu->iem.s.fDisregardLock)
3762 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
3763 else
3764 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
3765 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
3766
3767 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
3768 IEM_MC_ADVANCE_RIP();
3769 IEM_MC_END();
3770 return VINF_SUCCESS;
3771
3772 case IEMMODE_64BIT:
3773 IEM_MC_BEGIN(2, 2);
3774 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
3775 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
3776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3777
3778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3779 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3780 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
3781 if (!pVCpu->iem.s.fDisregardLock)
3782 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
3783 else
3784 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
3785 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
3786
3787 IEM_MC_ADVANCE_RIP();
3788 IEM_MC_END();
3789 return VINF_SUCCESS;
3790
3791 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3792 }
3793 }
3794}
3795
3796
3797/**
3798 * @opcode 0x88
3799 */
3800FNIEMOP_DEF(iemOp_mov_Eb_Gb)
3801{
3802 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
3803
3804 uint8_t bRm;
3805 IEM_OPCODE_GET_NEXT_U8(&bRm);
3806
3807 /*
3808 * If rm is denoting a register, no more instruction bytes.
3809 */
3810 if (IEM_IS_MODRM_REG_MODE(bRm))
3811 {
3812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3813 IEM_MC_BEGIN(0, 1);
3814 IEM_MC_LOCAL(uint8_t, u8Value);
3815 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3816 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
3817 IEM_MC_ADVANCE_RIP();
3818 IEM_MC_END();
3819 }
3820 else
3821 {
3822 /*
3823 * We're writing a register to memory.
3824 */
3825 IEM_MC_BEGIN(0, 2);
3826 IEM_MC_LOCAL(uint8_t, u8Value);
3827 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3828 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3830 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3831 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
3832 IEM_MC_ADVANCE_RIP();
3833 IEM_MC_END();
3834 }
3835 return VINF_SUCCESS;
3836
3837}
3838
3839
3840/**
3841 * @opcode 0x89
3842 */
3843FNIEMOP_DEF(iemOp_mov_Ev_Gv)
3844{
3845 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
3846
3847 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3848
3849 /*
3850 * If rm is denoting a register, no more instruction bytes.
3851 */
3852 if (IEM_IS_MODRM_REG_MODE(bRm))
3853 {
3854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3855 switch (pVCpu->iem.s.enmEffOpSize)
3856 {
3857 case IEMMODE_16BIT:
3858 IEM_MC_BEGIN(0, 1);
3859 IEM_MC_LOCAL(uint16_t, u16Value);
3860 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3861 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
3862 IEM_MC_ADVANCE_RIP();
3863 IEM_MC_END();
3864 break;
3865
3866 case IEMMODE_32BIT:
3867 IEM_MC_BEGIN(0, 1);
3868 IEM_MC_LOCAL(uint32_t, u32Value);
3869 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3870 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
3871 IEM_MC_ADVANCE_RIP();
3872 IEM_MC_END();
3873 break;
3874
3875 case IEMMODE_64BIT:
3876 IEM_MC_BEGIN(0, 1);
3877 IEM_MC_LOCAL(uint64_t, u64Value);
3878 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3879 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
3880 IEM_MC_ADVANCE_RIP();
3881 IEM_MC_END();
3882 break;
3883 }
3884 }
3885 else
3886 {
3887 /*
3888 * We're writing a register to memory.
3889 */
3890 switch (pVCpu->iem.s.enmEffOpSize)
3891 {
3892 case IEMMODE_16BIT:
3893 IEM_MC_BEGIN(0, 2);
3894 IEM_MC_LOCAL(uint16_t, u16Value);
3895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3898 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3899 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
3900 IEM_MC_ADVANCE_RIP();
3901 IEM_MC_END();
3902 break;
3903
3904 case IEMMODE_32BIT:
3905 IEM_MC_BEGIN(0, 2);
3906 IEM_MC_LOCAL(uint32_t, u32Value);
3907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3908 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3910 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3911 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
3912 IEM_MC_ADVANCE_RIP();
3913 IEM_MC_END();
3914 break;
3915
3916 case IEMMODE_64BIT:
3917 IEM_MC_BEGIN(0, 2);
3918 IEM_MC_LOCAL(uint64_t, u64Value);
3919 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3922 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3923 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
3924 IEM_MC_ADVANCE_RIP();
3925 IEM_MC_END();
3926 break;
3927 }
3928 }
3929 return VINF_SUCCESS;
3930}
3931
3932
3933/**
3934 * @opcode 0x8a
3935 */
3936FNIEMOP_DEF(iemOp_mov_Gb_Eb)
3937{
3938 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
3939
3940 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3941
3942 /*
3943 * If rm is denoting a register, no more instruction bytes.
3944 */
3945 if (IEM_IS_MODRM_REG_MODE(bRm))
3946 {
3947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3948 IEM_MC_BEGIN(0, 1);
3949 IEM_MC_LOCAL(uint8_t, u8Value);
3950 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
3951 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
3952 IEM_MC_ADVANCE_RIP();
3953 IEM_MC_END();
3954 }
3955 else
3956 {
3957 /*
3958 * We're loading a register from memory.
3959 */
3960 IEM_MC_BEGIN(0, 2);
3961 IEM_MC_LOCAL(uint8_t, u8Value);
3962 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3965 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3966 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
3967 IEM_MC_ADVANCE_RIP();
3968 IEM_MC_END();
3969 }
3970 return VINF_SUCCESS;
3971}
3972
3973
3974/**
3975 * @opcode 0x8b
3976 */
3977FNIEMOP_DEF(iemOp_mov_Gv_Ev)
3978{
3979 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
3980
3981 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3982
3983 /*
3984 * If rm is denoting a register, no more instruction bytes.
3985 */
3986 if (IEM_IS_MODRM_REG_MODE(bRm))
3987 {
3988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3989 switch (pVCpu->iem.s.enmEffOpSize)
3990 {
3991 case IEMMODE_16BIT:
3992 IEM_MC_BEGIN(0, 1);
3993 IEM_MC_LOCAL(uint16_t, u16Value);
3994 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
3995 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
3996 IEM_MC_ADVANCE_RIP();
3997 IEM_MC_END();
3998 break;
3999
4000 case IEMMODE_32BIT:
4001 IEM_MC_BEGIN(0, 1);
4002 IEM_MC_LOCAL(uint32_t, u32Value);
4003 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4004 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
4005 IEM_MC_ADVANCE_RIP();
4006 IEM_MC_END();
4007 break;
4008
4009 case IEMMODE_64BIT:
4010 IEM_MC_BEGIN(0, 1);
4011 IEM_MC_LOCAL(uint64_t, u64Value);
4012 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4013 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
4014 IEM_MC_ADVANCE_RIP();
4015 IEM_MC_END();
4016 break;
4017 }
4018 }
4019 else
4020 {
4021 /*
4022 * We're loading a register from memory.
4023 */
4024 switch (pVCpu->iem.s.enmEffOpSize)
4025 {
4026 case IEMMODE_16BIT:
4027 IEM_MC_BEGIN(0, 2);
4028 IEM_MC_LOCAL(uint16_t, u16Value);
4029 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4030 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4032 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4033 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
4034 IEM_MC_ADVANCE_RIP();
4035 IEM_MC_END();
4036 break;
4037
4038 case IEMMODE_32BIT:
4039 IEM_MC_BEGIN(0, 2);
4040 IEM_MC_LOCAL(uint32_t, u32Value);
4041 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4044 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4045 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
4046 IEM_MC_ADVANCE_RIP();
4047 IEM_MC_END();
4048 break;
4049
4050 case IEMMODE_64BIT:
4051 IEM_MC_BEGIN(0, 2);
4052 IEM_MC_LOCAL(uint64_t, u64Value);
4053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4054 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4056 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4057 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
4058 IEM_MC_ADVANCE_RIP();
4059 IEM_MC_END();
4060 break;
4061 }
4062 }
4063 return VINF_SUCCESS;
4064}
4065
4066
4067/**
4068 * opcode 0x63
4069 * @todo Table fixme
4070 */
4071FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
4072{
4073 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
4074 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
4075 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
4076 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
4077 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
4078}
4079
4080
4081/**
4082 * @opcode 0x8c
4083 */
4084FNIEMOP_DEF(iemOp_mov_Ev_Sw)
4085{
4086 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
4087
4088 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4089
4090 /*
4091 * Check that the destination register exists. The REX.R prefix is ignored.
4092 */
4093 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
4094 if ( iSegReg > X86_SREG_GS)
4095 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
4096
4097 /*
4098 * If rm is denoting a register, no more instruction bytes.
4099 * In that case, the operand size is respected and the upper bits are
4100 * cleared (starting with some pentium).
4101 */
4102 if (IEM_IS_MODRM_REG_MODE(bRm))
4103 {
4104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4105 switch (pVCpu->iem.s.enmEffOpSize)
4106 {
4107 case IEMMODE_16BIT:
4108 IEM_MC_BEGIN(0, 1);
4109 IEM_MC_LOCAL(uint16_t, u16Value);
4110 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
4111 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
4112 IEM_MC_ADVANCE_RIP();
4113 IEM_MC_END();
4114 break;
4115
4116 case IEMMODE_32BIT:
4117 IEM_MC_BEGIN(0, 1);
4118 IEM_MC_LOCAL(uint32_t, u32Value);
4119 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
4120 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
4121 IEM_MC_ADVANCE_RIP();
4122 IEM_MC_END();
4123 break;
4124
4125 case IEMMODE_64BIT:
4126 IEM_MC_BEGIN(0, 1);
4127 IEM_MC_LOCAL(uint64_t, u64Value);
4128 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
4129 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
4130 IEM_MC_ADVANCE_RIP();
4131 IEM_MC_END();
4132 break;
4133 }
4134 }
4135 else
4136 {
4137 /*
4138 * We're saving the register to memory. The access is word sized
4139 * regardless of operand size prefixes.
4140 */
4141#if 0 /* not necessary */
4142 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
4143#endif
4144 IEM_MC_BEGIN(0, 2);
4145 IEM_MC_LOCAL(uint16_t, u16Value);
4146 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4147 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4149 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
4150 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
4151 IEM_MC_ADVANCE_RIP();
4152 IEM_MC_END();
4153 }
4154 return VINF_SUCCESS;
4155}
4156
4157
4158
4159
4160/**
4161 * @opcode 0x8d
4162 */
4163FNIEMOP_DEF(iemOp_lea_Gv_M)
4164{
4165 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
4166 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4167 if (IEM_IS_MODRM_REG_MODE(bRm))
4168 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
4169
4170 switch (pVCpu->iem.s.enmEffOpSize)
4171 {
4172 case IEMMODE_16BIT:
4173 IEM_MC_BEGIN(0, 2);
4174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4175 IEM_MC_LOCAL(uint16_t, u16Cast);
4176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4178 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
4179 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
4180 IEM_MC_ADVANCE_RIP();
4181 IEM_MC_END();
4182 return VINF_SUCCESS;
4183
4184 case IEMMODE_32BIT:
4185 IEM_MC_BEGIN(0, 2);
4186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4187 IEM_MC_LOCAL(uint32_t, u32Cast);
4188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4190 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
4191 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
4192 IEM_MC_ADVANCE_RIP();
4193 IEM_MC_END();
4194 return VINF_SUCCESS;
4195
4196 case IEMMODE_64BIT:
4197 IEM_MC_BEGIN(0, 1);
4198 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4199 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4201 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
4202 IEM_MC_ADVANCE_RIP();
4203 IEM_MC_END();
4204 return VINF_SUCCESS;
4205 }
4206 AssertFailedReturn(VERR_IEM_IPE_7);
4207}
4208
4209
4210/**
4211 * @opcode 0x8e
4212 */
4213FNIEMOP_DEF(iemOp_mov_Sw_Ev)
4214{
4215 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
4216
4217 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4218
4219 /*
4220 * The practical operand size is 16-bit.
4221 */
4222#if 0 /* not necessary */
4223 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
4224#endif
4225
4226 /*
4227 * Check that the destination register exists and can be used with this
4228 * instruction. The REX.R prefix is ignored.
4229 */
4230 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
4231 if ( iSegReg == X86_SREG_CS
4232 || iSegReg > X86_SREG_GS)
4233 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
4234
4235 /*
4236 * If rm is denoting a register, no more instruction bytes.
4237 */
4238 if (IEM_IS_MODRM_REG_MODE(bRm))
4239 {
4240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4241 IEM_MC_BEGIN(2, 0);
4242 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
4243 IEM_MC_ARG(uint16_t, u16Value, 1);
4244 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4245 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
4246 IEM_MC_END();
4247 }
4248 else
4249 {
4250 /*
4251 * We're loading the register from memory. The access is word sized
4252 * regardless of operand size prefixes.
4253 */
4254 IEM_MC_BEGIN(2, 1);
4255 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
4256 IEM_MC_ARG(uint16_t, u16Value, 1);
4257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4258 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4260 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4261 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
4262 IEM_MC_END();
4263 }
4264 return VINF_SUCCESS;
4265}
4266
4267
/** Opcode 0x8f /0.
 *
 * pop Ev - pop a word/dword/qword off the stack into a register or memory
 * operand.  The register form shares iemOpCommonPopGReg; the memory form is
 * implemented directly here (interpreter-only, not IEM_MC based) because rSP
 * must be advanced before the effective address is calculated. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP.  The last argument (2/4/8) is
       the operand size by which rSP is advanced for the EA calculation. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl.  Pop into a temporary rSP
       copy so that nothing is committed if the memory store below faults. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Only commit the updated rSP and advance RIP if everything succeeded. */
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4361
4362
/**
 * @opcode 0x8f
 *
 * Dispatches between pop Ev (modrm.reg == 0) and the AMD XOP prefix
 * (modrm.reg == 1..7).
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exactly the XOP conformance checks kick in during
         *        instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* Like VEX, XOP may not be combined with legacy prefixes or REX. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* R, X, B and vvvv are stored inverted in the prefix bytes, hence
               the ~ before extracting them (same layout as the VEX prefix). */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f) /* mmmmm: selects the opcode map */
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
4425
4426
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Shared by opcodes 0x90 thru 0x97.  Exchanges the general register denoted
 * by @a iReg (after merging in REX.B) with rAX at the current effective
 * operand size.  No EFLAGS are modified.
 *
 * @param   iReg    The low 3 bits of the register index (from the opcode);
 *                  REX.B is OR'ed in below.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4476
4477
/**
 * @opcode 0x90
 *
 * Plain 0x90 is NOP, but with a REX.B prefix it is 'xchg r8,rAX' (since
 * xchg rAX,rAX with REX.B names a different register), and with an F3
 * prefix it is PAUSE.
 */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        IEMOP_MNEMONIC(pause, "pause");
        /* PAUSE may be intercepted under nested hardware virtualization, so
           defer to the C implementations when VMX/SVM is exposed to the guest. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVmx)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmx_pause);
#endif
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSvm)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_svm_pause);
#endif
    }
    else
        IEMOP_MNEMONIC(nop, "nop");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4509
4510
/**
 * @opcode 0x91
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    /* Shares the implementation with the other xchg rReg,rAX opcodes. */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
4519
4520
/**
 * @opcode 0x92
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    /* Shares the implementation with the other xchg rReg,rAX opcodes. */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
4529
4530
/**
 * @opcode 0x93
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    /* Shares the implementation with the other xchg rReg,rAX opcodes. */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
4539
4540
4541/**
4542 * @opcode 0x94
4543 */
4544FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4545{
4546 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4547 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4548}
4549
4550
/**
 * @opcode 0x95
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    /* Shares the implementation with the other xchg rReg,rAX opcodes. */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
4559
4560
/**
 * @opcode 0x96
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    /* Shares the implementation with the other xchg rReg,rAX opcodes. */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
4569
4570
/**
 * @opcode 0x97
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    /* Shares the implementation with the other xchg rReg,rAX opcodes. */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
4579
4580
/**
 * @opcode 0x98
 *
 * cbw/cwde/cdqe - sign extend AL into AX, AX into EAX, or EAX into RAX,
 * depending on the effective operand size.  Implemented by testing the sign
 * bit of the source and OR'ing in / AND'ing out the extension bits.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: /* cbw: AL -> AX */
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT: /* cwde: AX -> EAX */
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT: /* cdqe: EAX -> RAX */
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4628
4629
/**
 * @opcode 0x99
 *
 * cwd/cdq/cqo - sign extend rAX into rDX:rAX by filling rDX with the sign
 * bit of rAX (all ones or all zeros) at the effective operand size.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: /* cwd: AX -> DX:AX */
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT: /* cdq: EAX -> EDX:EAX */
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT: /* cqo: RAX -> RDX:RAX */
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4677
4678
/**
 * @opcode 0x9a
 *
 * call Ap - direct far call with an immediate selector:offset pointer.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);   /* 32-bit offset */
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg); /* 16-bit offset, zero extended */
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel); /* segment selector follows the offset */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
4697
4698
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending unmasked FPU exceptions (and CR0.MP/TS conditions)
 * without otherwise doing anything. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4712
4713
/**
 * @opcode 0x9c
 *
 * pushf - push the flags register; deferred to the C implementation which
 * handles the mode/IOPL specific details.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* stack op: defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
4724
4725
/**
 * @opcode 0x9d
 *
 * popf - pop the flags register; deferred to the C implementation which
 * handles the mode/IOPL specific details.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* stack op: defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
4736
4737
/**
 * @opcode 0x9e
 *
 * sahf - store AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF).
 * Requires the LAHF/SAHF CPUID feature in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Only the arithmetic status flags are taken from AH... */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    /* ...and merged into the low byte of EFLAGS, forcing the fixed bit 1. */
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4762
4763
/**
 * @opcode 0x9f
 *
 * lahf - load the low byte of EFLAGS into AH.  Requires the LAHF/SAHF CPUID
 * feature in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags); /* gpr index 4 with no REX = AH */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4782
4783
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX part of the opcode and fend off lock
 * prefixes.  The displacement width follows the effective address size
 * (16/32/64-bit) and is zero extended to 64 bits.  Will return on failures.
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
4808
/**
 * @opcode 0xa0
 *
 * mov AL,Ob - load AL from a memory offset given directly in the
 * instruction stream (moffs form, no ModR/M byte).
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4832
4833
/**
 * @opcode 0xa1
 *
 * mov rAX,Ov - load rAX from a memory offset given directly in the
 * instruction stream (moffs form, no ModR/M byte).
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX at the effective operand size.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4881
4882
/**
 * @opcode 0xa2
 *
 * mov Ob,AL - store AL to a memory offset given directly in the
 * instruction stream (moffs form, no ModR/M byte).
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4906
4907
/**
 * @opcode 0xa3
 *
 * mov Ov,rAX - store rAX to a memory offset given directly in the
 * instruction stream (moffs form, no ModR/M byte).
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX at the effective operand size.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4955
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits one non-repeated MOVS step: load from DS(or seg prefix):rSI, store to
 * ES:rDI, then advance or retreat both index registers by the element size
 * according to EFLAGS.DF.  Addresses are zero extended to 64 bits. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
4974
/**
 * @opcode 0xa4
 *
 * movsb - byte string move.  REP/REPNE forms are deferred to the C
 * implementations; the plain form uses the shared IEM_MOVS_CASE macro.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5010
5011
/**
 * @opcode 0xa5
 *
 * movsw/movsd/movsq - word/dword/qword string move.  REP forms are deferred
 * to per op/addr-size C implementations; plain forms use IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returned */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 64-bit op + 16-bit addr cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5096
5097#undef IEM_MOVS_CASE
5098
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Emits one non-repeated CMPS step: fetch from DS(or seg prefix):rSI and
 * ES:rDI, compare them via iemAImpl_cmp_uNN (updating EFLAGS), then advance
 * or retreat both index registers by the element size according to
 * EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
/**
 * @opcode 0xa6
 *
 * cmpsb - byte string compare.  REPE and REPNE forms are deferred to
 * separate C implementations; the plain form uses IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
5173
5174
/**
 * @opcode 0xa7
 *
 * cmpsw/cmpsd/cmpsq - word/dword/qword string compare.  REPE and REPNE
 * forms are deferred to per op/addr-size C implementations; plain forms use
 * IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returned */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 64-bit op + 16-bit addr cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returned */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 64-bit op + 16-bit addr cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
5295
5296#undef IEM_CMPS_CASE
5297
5298/**
5299 * @opcode 0xa8
5300 */
5301FNIEMOP_DEF(iemOp_test_AL_Ib)
5302{
5303 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
5304 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5305 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
5306}
5307
5308
5309/**
5310 * @opcode 0xa9
5311 */
5312FNIEMOP_DEF(iemOp_test_eAX_Iz)
5313{
5314 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
5315 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5316 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
5317}
5318
5319
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX: stores a
 * ValBits-wide copy of rAX to ES:xDI (ES is not overridable for STOS) and
 * steps xDI by ValBits/8 - down if EFLAGS.DF is set, up otherwise. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
5335
5336/**
5337 * @opcode 0xaa
5338 */
5339FNIEMOP_DEF(iemOp_stosb_Yb_AL)
5340{
5341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5342
5343 /*
5344 * Use the C implementation if a repeat prefix is encountered.
5345 */
5346 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5347 {
5348 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
5349 switch (pVCpu->iem.s.enmEffAddrMode)
5350 {
5351 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
5352 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
5353 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
5354 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5355 }
5356 }
5357 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
5358
5359 /*
5360 * Sharing case implementation with stos[wdq] below.
5361 */
5362 switch (pVCpu->iem.s.enmEffAddrMode)
5363 {
5364 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
5365 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
5366 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
5367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5368 }
5369 return VINF_SUCCESS;
5370}
5371
5372
5373/**
5374 * @opcode 0xab
5375 */
5376FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
5377{
5378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5379
5380 /*
5381 * Use the C implementation if a repeat prefix is encountered.
5382 */
5383 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5384 {
5385 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
5386 switch (pVCpu->iem.s.enmEffOpSize)
5387 {
5388 case IEMMODE_16BIT:
5389 switch (pVCpu->iem.s.enmEffAddrMode)
5390 {
5391 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
5392 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
5393 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
5394 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5395 }
5396 break;
5397 case IEMMODE_32BIT:
5398 switch (pVCpu->iem.s.enmEffAddrMode)
5399 {
5400 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
5401 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
5402 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
5403 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5404 }
5405 case IEMMODE_64BIT:
5406 switch (pVCpu->iem.s.enmEffAddrMode)
5407 {
5408 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
5409 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
5410 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
5411 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5412 }
5413 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5414 }
5415 }
5416 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
5417
5418 /*
5419 * Annoying double switch here.
5420 * Using ugly macro for implementing the cases, sharing it with stosb.
5421 */
5422 switch (pVCpu->iem.s.enmEffOpSize)
5423 {
5424 case IEMMODE_16BIT:
5425 switch (pVCpu->iem.s.enmEffAddrMode)
5426 {
5427 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
5428 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
5429 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
5430 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5431 }
5432 break;
5433
5434 case IEMMODE_32BIT:
5435 switch (pVCpu->iem.s.enmEffAddrMode)
5436 {
5437 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
5438 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
5439 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
5440 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5441 }
5442 break;
5443
5444 case IEMMODE_64BIT:
5445 switch (pVCpu->iem.s.enmEffAddrMode)
5446 {
5447 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5448 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
5449 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
5450 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5451 }
5452 break;
5453 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5454 }
5455 return VINF_SUCCESS;
5456}
5457
5458#undef IEM_STOS_CASE
5459
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv: loads a
 * ValBits-wide value from iEffSeg:xSI (DS unless overridden) into rAX and
 * steps xSI by ValBits/8 - down if EFLAGS.DF is set, up otherwise. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5475
5476/**
5477 * @opcode 0xac
5478 */
5479FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
5480{
5481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5482
5483 /*
5484 * Use the C implementation if a repeat prefix is encountered.
5485 */
5486 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5487 {
5488 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
5489 switch (pVCpu->iem.s.enmEffAddrMode)
5490 {
5491 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
5492 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
5493 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
5494 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5495 }
5496 }
5497 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
5498
5499 /*
5500 * Sharing case implementation with stos[wdq] below.
5501 */
5502 switch (pVCpu->iem.s.enmEffAddrMode)
5503 {
5504 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
5505 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
5506 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
5507 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5508 }
5509 return VINF_SUCCESS;
5510}
5511
5512
5513/**
5514 * @opcode 0xad
5515 */
5516FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
5517{
5518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5519
5520 /*
5521 * Use the C implementation if a repeat prefix is encountered.
5522 */
5523 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5524 {
5525 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
5526 switch (pVCpu->iem.s.enmEffOpSize)
5527 {
5528 case IEMMODE_16BIT:
5529 switch (pVCpu->iem.s.enmEffAddrMode)
5530 {
5531 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
5532 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
5533 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
5534 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5535 }
5536 break;
5537 case IEMMODE_32BIT:
5538 switch (pVCpu->iem.s.enmEffAddrMode)
5539 {
5540 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
5541 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
5542 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
5543 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5544 }
5545 case IEMMODE_64BIT:
5546 switch (pVCpu->iem.s.enmEffAddrMode)
5547 {
5548 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
5549 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
5550 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
5551 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5552 }
5553 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5554 }
5555 }
5556 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
5557
5558 /*
5559 * Annoying double switch here.
5560 * Using ugly macro for implementing the cases, sharing it with lodsb.
5561 */
5562 switch (pVCpu->iem.s.enmEffOpSize)
5563 {
5564 case IEMMODE_16BIT:
5565 switch (pVCpu->iem.s.enmEffAddrMode)
5566 {
5567 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
5568 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
5569 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
5570 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5571 }
5572 break;
5573
5574 case IEMMODE_32BIT:
5575 switch (pVCpu->iem.s.enmEffAddrMode)
5576 {
5577 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
5578 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
5579 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
5580 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5581 }
5582 break;
5583
5584 case IEMMODE_64BIT:
5585 switch (pVCpu->iem.s.enmEffAddrMode)
5586 {
5587 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5588 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
5589 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
5590 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5591 }
5592 break;
5593 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5594 }
5595 return VINF_SUCCESS;
5596}
5597
5598#undef IEM_LODS_CASE
5599
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv: compares rAX
 * with the ValBits-wide value at ES:xDI (ES is not overridable for SCAS)
 * via the cmp implementation - EFLAGS only, no destination write - and
 * steps xDI by ValBits/8, direction per EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5621
5622/**
5623 * @opcode 0xae
5624 */
5625FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5626{
5627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5628
5629 /*
5630 * Use the C implementation if a repeat prefix is encountered.
5631 */
5632 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5633 {
5634 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5635 switch (pVCpu->iem.s.enmEffAddrMode)
5636 {
5637 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5638 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5639 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5640 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5641 }
5642 }
5643 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5644 {
5645 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5646 switch (pVCpu->iem.s.enmEffAddrMode)
5647 {
5648 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5649 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5650 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5651 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5652 }
5653 }
5654 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5655
5656 /*
5657 * Sharing case implementation with stos[wdq] below.
5658 */
5659 switch (pVCpu->iem.s.enmEffAddrMode)
5660 {
5661 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5662 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5663 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5664 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5665 }
5666 return VINF_SUCCESS;
5667}
5668
5669
5670/**
5671 * @opcode 0xaf
5672 */
5673FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5674{
5675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5676
5677 /*
5678 * Use the C implementation if a repeat prefix is encountered.
5679 */
5680 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5681 {
5682 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5683 switch (pVCpu->iem.s.enmEffOpSize)
5684 {
5685 case IEMMODE_16BIT:
5686 switch (pVCpu->iem.s.enmEffAddrMode)
5687 {
5688 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5689 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5690 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5691 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5692 }
5693 break;
5694 case IEMMODE_32BIT:
5695 switch (pVCpu->iem.s.enmEffAddrMode)
5696 {
5697 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5698 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5699 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5700 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5701 }
5702 case IEMMODE_64BIT:
5703 switch (pVCpu->iem.s.enmEffAddrMode)
5704 {
5705 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
5706 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5707 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5708 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5709 }
5710 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5711 }
5712 }
5713 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5714 {
5715 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5716 switch (pVCpu->iem.s.enmEffOpSize)
5717 {
5718 case IEMMODE_16BIT:
5719 switch (pVCpu->iem.s.enmEffAddrMode)
5720 {
5721 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5722 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5723 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5724 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5725 }
5726 break;
5727 case IEMMODE_32BIT:
5728 switch (pVCpu->iem.s.enmEffAddrMode)
5729 {
5730 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5731 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5732 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5733 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5734 }
5735 case IEMMODE_64BIT:
5736 switch (pVCpu->iem.s.enmEffAddrMode)
5737 {
5738 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5739 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5740 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5741 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5742 }
5743 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5744 }
5745 }
5746 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5747
5748 /*
5749 * Annoying double switch here.
5750 * Using ugly macro for implementing the cases, sharing it with scasb.
5751 */
5752 switch (pVCpu->iem.s.enmEffOpSize)
5753 {
5754 case IEMMODE_16BIT:
5755 switch (pVCpu->iem.s.enmEffAddrMode)
5756 {
5757 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5758 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5759 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5761 }
5762 break;
5763
5764 case IEMMODE_32BIT:
5765 switch (pVCpu->iem.s.enmEffAddrMode)
5766 {
5767 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5768 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5769 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5770 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5771 }
5772 break;
5773
5774 case IEMMODE_64BIT:
5775 switch (pVCpu->iem.s.enmEffAddrMode)
5776 {
5777 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5778 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5779 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5780 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5781 }
5782 break;
5783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5784 }
5785 return VINF_SUCCESS;
5786}
5787
5788#undef IEM_SCAS_CASE
5789
5790/**
5791 * Common 'mov r8, imm8' helper.
5792 */
5793FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
5794{
5795 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
5796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5797
5798 IEM_MC_BEGIN(0, 1);
5799 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
5800 IEM_MC_STORE_GREG_U8(iReg, u8Value);
5801 IEM_MC_ADVANCE_RIP();
5802 IEM_MC_END();
5803
5804 return VINF_SUCCESS;
5805}
5806
5807
5808/**
5809 * @opcode 0xb0
5810 */
5811FNIEMOP_DEF(iemOp_mov_AL_Ib)
5812{
5813 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
5814 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5815}
5816
5817
5818/**
5819 * @opcode 0xb1
5820 */
5821FNIEMOP_DEF(iemOp_CL_Ib)
5822{
5823 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
5824 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5825}
5826
5827
5828/**
5829 * @opcode 0xb2
5830 */
5831FNIEMOP_DEF(iemOp_DL_Ib)
5832{
5833 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
5834 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5835}
5836
5837
5838/**
5839 * @opcode 0xb3
5840 */
5841FNIEMOP_DEF(iemOp_BL_Ib)
5842{
5843 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
5844 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5845}
5846
5847
5848/**
5849 * @opcode 0xb4
5850 */
5851FNIEMOP_DEF(iemOp_mov_AH_Ib)
5852{
5853 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
5854 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5855}
5856
5857
5858/**
5859 * @opcode 0xb5
5860 */
5861FNIEMOP_DEF(iemOp_CH_Ib)
5862{
5863 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
5864 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5865}
5866
5867
5868/**
5869 * @opcode 0xb6
5870 */
5871FNIEMOP_DEF(iemOp_DH_Ib)
5872{
5873 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
5874 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5875}
5876
5877
5878/**
5879 * @opcode 0xb7
5880 */
5881FNIEMOP_DEF(iemOp_BH_Ib)
5882{
5883 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
5884 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5885}
5886
5887
5888/**
5889 * Common 'mov regX,immX' helper.
5890 */
5891FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5892{
5893 switch (pVCpu->iem.s.enmEffOpSize)
5894 {
5895 case IEMMODE_16BIT:
5896 {
5897 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5899
5900 IEM_MC_BEGIN(0, 1);
5901 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5902 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5903 IEM_MC_ADVANCE_RIP();
5904 IEM_MC_END();
5905 break;
5906 }
5907
5908 case IEMMODE_32BIT:
5909 {
5910 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5912
5913 IEM_MC_BEGIN(0, 1);
5914 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5915 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5916 IEM_MC_ADVANCE_RIP();
5917 IEM_MC_END();
5918 break;
5919 }
5920 case IEMMODE_64BIT:
5921 {
5922 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5924
5925 IEM_MC_BEGIN(0, 1);
5926 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5927 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5928 IEM_MC_ADVANCE_RIP();
5929 IEM_MC_END();
5930 break;
5931 }
5932 }
5933
5934 return VINF_SUCCESS;
5935}
5936
5937
5938/**
5939 * @opcode 0xb8
5940 */
5941FNIEMOP_DEF(iemOp_eAX_Iv)
5942{
5943 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
5944 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5945}
5946
5947
5948/**
5949 * @opcode 0xb9
5950 */
5951FNIEMOP_DEF(iemOp_eCX_Iv)
5952{
5953 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
5954 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5955}
5956
5957
5958/**
5959 * @opcode 0xba
5960 */
5961FNIEMOP_DEF(iemOp_eDX_Iv)
5962{
5963 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
5964 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5965}
5966
5967
5968/**
5969 * @opcode 0xbb
5970 */
5971FNIEMOP_DEF(iemOp_eBX_Iv)
5972{
5973 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
5974 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5975}
5976
5977
5978/**
5979 * @opcode 0xbc
5980 */
5981FNIEMOP_DEF(iemOp_eSP_Iv)
5982{
5983 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
5984 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5985}
5986
5987
5988/**
5989 * @opcode 0xbd
5990 */
5991FNIEMOP_DEF(iemOp_eBP_Iv)
5992{
5993 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5994 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5995}
5996
5997
5998/**
5999 * @opcode 0xbe
6000 */
6001FNIEMOP_DEF(iemOp_eSI_Iv)
6002{
6003 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
6004 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
6005}
6006
6007
6008/**
6009 * @opcode 0xbf
6010 */
6011FNIEMOP_DEF(iemOp_eDI_Iv)
6012{
6013 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
6014 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
6015}
6016
6017
6018/**
6019 * @opcode 0xc0
6020 */
6021FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
6022{
6023 IEMOP_HLP_MIN_186();
6024 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6025 PCIEMOPSHIFTSIZES pImpl;
6026 switch (IEM_GET_MODRM_REG_8(bRm))
6027 {
6028 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
6029 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
6030 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
6031 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
6032 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
6033 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
6034 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
6035 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6036 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6037 }
6038 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6039
6040 if (IEM_IS_MODRM_REG_MODE(bRm))
6041 {
6042 /* register */
6043 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6045 IEM_MC_BEGIN(3, 0);
6046 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6047 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6048 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6049 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6050 IEM_MC_REF_EFLAGS(pEFlags);
6051 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6052 IEM_MC_ADVANCE_RIP();
6053 IEM_MC_END();
6054 }
6055 else
6056 {
6057 /* memory */
6058 IEM_MC_BEGIN(3, 2);
6059 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6060 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6061 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6063
6064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6065 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6066 IEM_MC_ASSIGN(cShiftArg, cShift);
6067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6068 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6069 IEM_MC_FETCH_EFLAGS(EFlags);
6070 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6071
6072 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6073 IEM_MC_COMMIT_EFLAGS(EFlags);
6074 IEM_MC_ADVANCE_RIP();
6075 IEM_MC_END();
6076 }
6077 return VINF_SUCCESS;
6078}
6079
6080
6081/**
6082 * @opcode 0xc1
6083 */
6084FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
6085{
6086 IEMOP_HLP_MIN_186();
6087 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6088 PCIEMOPSHIFTSIZES pImpl;
6089 switch (IEM_GET_MODRM_REG_8(bRm))
6090 {
6091 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
6092 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
6093 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
6094 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
6095 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
6096 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
6097 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
6098 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6099 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6100 }
6101 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6102
6103 if (IEM_IS_MODRM_REG_MODE(bRm))
6104 {
6105 /* register */
6106 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6108 switch (pVCpu->iem.s.enmEffOpSize)
6109 {
6110 case IEMMODE_16BIT:
6111 IEM_MC_BEGIN(3, 0);
6112 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6113 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6114 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6115 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6116 IEM_MC_REF_EFLAGS(pEFlags);
6117 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6118 IEM_MC_ADVANCE_RIP();
6119 IEM_MC_END();
6120 return VINF_SUCCESS;
6121
6122 case IEMMODE_32BIT:
6123 IEM_MC_BEGIN(3, 0);
6124 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6125 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6126 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6127 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6128 IEM_MC_REF_EFLAGS(pEFlags);
6129 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6130 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6131 IEM_MC_ADVANCE_RIP();
6132 IEM_MC_END();
6133 return VINF_SUCCESS;
6134
6135 case IEMMODE_64BIT:
6136 IEM_MC_BEGIN(3, 0);
6137 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6138 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6139 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6140 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6141 IEM_MC_REF_EFLAGS(pEFlags);
6142 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6143 IEM_MC_ADVANCE_RIP();
6144 IEM_MC_END();
6145 return VINF_SUCCESS;
6146
6147 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6148 }
6149 }
6150 else
6151 {
6152 /* memory */
6153 switch (pVCpu->iem.s.enmEffOpSize)
6154 {
6155 case IEMMODE_16BIT:
6156 IEM_MC_BEGIN(3, 2);
6157 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6158 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6159 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6161
6162 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6163 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6164 IEM_MC_ASSIGN(cShiftArg, cShift);
6165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6166 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6167 IEM_MC_FETCH_EFLAGS(EFlags);
6168 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6169
6170 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6171 IEM_MC_COMMIT_EFLAGS(EFlags);
6172 IEM_MC_ADVANCE_RIP();
6173 IEM_MC_END();
6174 return VINF_SUCCESS;
6175
6176 case IEMMODE_32BIT:
6177 IEM_MC_BEGIN(3, 2);
6178 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6179 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6180 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6181 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6182
6183 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6184 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6185 IEM_MC_ASSIGN(cShiftArg, cShift);
6186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6187 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6188 IEM_MC_FETCH_EFLAGS(EFlags);
6189 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6190
6191 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6192 IEM_MC_COMMIT_EFLAGS(EFlags);
6193 IEM_MC_ADVANCE_RIP();
6194 IEM_MC_END();
6195 return VINF_SUCCESS;
6196
6197 case IEMMODE_64BIT:
6198 IEM_MC_BEGIN(3, 2);
6199 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6200 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6201 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6202 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6203
6204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6205 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6206 IEM_MC_ASSIGN(cShiftArg, cShift);
6207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6208 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6209 IEM_MC_FETCH_EFLAGS(EFlags);
6210 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6211
6212 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6213 IEM_MC_COMMIT_EFLAGS(EFlags);
6214 IEM_MC_ADVANCE_RIP();
6215 IEM_MC_END();
6216 return VINF_SUCCESS;
6217
6218 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6219 }
6220 }
6221}
6222
6223
6224/**
6225 * @opcode 0xc2
6226 */
6227FNIEMOP_DEF(iemOp_retn_Iw)
6228{
6229 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
6230 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6232 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6233 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
6234}
6235
6236
6237/**
6238 * @opcode 0xc3
6239 */
6240FNIEMOP_DEF(iemOp_retn)
6241{
6242 IEMOP_MNEMONIC(retn, "retn");
6243 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6245 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
6246}
6247
6248
/**
 * @opcode 0xc4
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* VEX.W (bit 7 of the 3rd VEX byte) only acts as REX.W in 64-bit mode. */
            if ((bVex2 & 0x80 /* VEX.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* bRm doubles as the 2nd VEX byte: inverted R, X and B in bits 7:5. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            /* 3rd VEX byte: inverted vvvv (bits 6:3), vector length L (bit 2), pp (bits 1:0). */
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* Bits 4:0 of the 2nd VEX byte select the opcode map (m-mmmm). */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    /* NOTE(review): the log message says 'vvvv' but these are the
                       map-select (m-mmmm) bits of the 2nd VEX byte. */
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        Log(("VEX3: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Not a VEX prefix: decode as the legacy LES Gv,Mp instruction. */
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
6318
6319
/**
 * @opcode 0xc5
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* bRm doubles as the 2nd VEX byte: ~R (bit 7), ~vvvv (6:3), L (2), pp (1:0).
               The 2-byte form implies the 0x0f opcode map and W=0. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Not a VEX prefix: decode as the legacy LDS Gv,Mp instruction. */
    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
6363
6364
/**
 * @opcode 0xc6
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* The 3rd argument is the number of immediate bytes following the
           ModR/M encoding (1 here), needed for RIP-relative addressing. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6399
6400
/**
 * @opcode 0xc7
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* Iz is at most 32 bits; sign-extend to 64 bits per the SDM. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        /* Note: the 3rd IEM_MC_CALC_RM_EFF_ADDR argument is the number of
           immediate bytes that follow (2 or 4), for RIP-relative fixups. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Still 4 immediate bytes: Iz is a sign-extended dword. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6488
6489
6490
6491
/**
 * @opcode 0xc8
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186(); /* ENTER first appeared on the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Iw = stack frame size, Ib = nesting level. */
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
6505
6506
/**
 * @opcode 0xc9
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186(); /* LEAVE first appeared on the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
6518
6519
/**
 * @opcode 0xca
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    /* Far return; Iw is the number of bytes to pop after the return. */
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6531
6532
/**
 * @opcode 0xcb
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    /* Far return without an immediate: pop zero extra bytes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
6543
6544
/**
 * @opcode 0xcc
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Breakpoint exception; IEMINT_INT3 distinguishes it from INT n/INTO. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
6554
6555
/**
 * @opcode 0xcd
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    /* Ib is the interrupt vector to raise. */
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, IEMINT_INTN);
}
6566
6567
/**
 * @opcode 0xce
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT(); /* INTO is invalid in 64-bit mode. */

    /* Raises #OF via the common INT worker; the CIMPL checks EFLAGS.OF. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,  /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(IEMINT,    enmInt, /*=*/ IEMINT_INTO, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, enmInt);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6583
6584
/**
 * @opcode 0xcf
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
6594
6595
/**
 * @opcode 0xd0
 *
 * Group 2 byte shifts/rotates with a fixed count of 1: the ModR/M reg
 * field selects the operation; /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF/AF are (partially) undefined for these operations per the SDM. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the operand read/write, shift in place, then commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6653
6654
6655
/**
 * @opcode 0xd1
 *
 * Group 2 word/dword/qword shifts/rotates with a fixed count of 1; the
 * ModR/M reg field selects the operation, /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF/AF are (partially) undefined for these operations per the SDM. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* Writing a 32-bit GPR zeroes the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the operand read/write, shift in place, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6789
6790
/**
 * @opcode 0xd2
 *
 * Group 2 byte shifts/rotates with the count taken from CL; the ModR/M reg
 * field selects the operation, /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF/AF are (partially) undefined for these operations per the SDM. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* Shift count comes from CL (low byte of rCX). */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the operand read/write, shift in place, then commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6850
6851
/**
 * @opcode 0xd3
 *
 * Group 2 word/dword/qword shifts/rotates with the count taken from CL;
 * the ModR/M reg field selects the operation, /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF/AF are (partially) undefined for these operations per the SDM. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                /* Shift count comes from CL (low byte of rCX). */
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* Writing a 32-bit GPR zeroes the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the operand read/write, shift in place, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6991
/**
 * @opcode 0xd4
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* AAM is invalid in 64-bit mode. */
    /* AAM divides AL by the immediate, so an immediate of zero raises \#DE. */
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
7005
7006
/**
 * @opcode 0xd5
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* AAD is invalid in 64-bit mode. */
    /* Unlike AAM, a zero immediate is fine here (multiplication, no \#DE). */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
7018
7019
/**
 * @opcode 0xd6
 *
 * Undocumented SALC instruction: AL = CF ? 0xff : 0x00.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* 0xd6 is invalid in 64-bit mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7039
7040
/**
 * @opcode 0xd7
 *
 * XLAT: AL = [rBX + AL], one variant per effective address size.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            /* MEM16 variant: presumably applies 16-bit address wrap-around — confirm. */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7089
7090
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * @return  Strict VBox status code (VINF_SUCCESS on the normal paths).
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and STn must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result goes into ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0); /* underflow response targets ST0 */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7122
7123
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * @return  Strict VBox status code (VINF_SUCCESS on the normal paths).
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and STn must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw); /* only the status word is written back */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX = no register stored */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7155
7156
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * @return  Strict VBox status code (VINF_SUCCESS on the normal paths).
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and STn must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw); /* write FSW, then pop the stack */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX); /* UINT8_MAX = no register stored */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7188
7189
/** Opcode 0xd8 11/0.  FADD ST(0),ST(i): arithmetic on ST(0) and ST(i), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1.  FMUL ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2.  FCOM ST(0),ST(i): compare, only FSW flags are updated (no store). */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3.  FCOMP ST(0),ST(i): same compare helper as FCOM, but pops afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4.  FSUB ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5.  FSUBR ST(0),ST(i): reversed-operand subtract. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6.  FDIV ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7.  FDIVR ST(0),ST(i): reversed-operand divide. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
7252
7253
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * Fetches the 32-bit real memory operand, calls the assembly helper with
 * ST(0) and the fetched value, and stores the full result (value + FSW) back
 * into ST(0).  An empty ST(0) raises an FPU stack underflow on register 0.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7290
7291
/** Opcode 0xd8 !11/0.  FADD ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1.  FMUL ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
7306
7307
/** Opcode 0xd8 !11/2.  FCOM ST(0),m32real.
 *
 * Compares ST(0) with a 32-bit real memory operand; only the FSW is updated
 * (no value is stored), so this does not use the iemOpHlpFpu_st0_m32r worker.
 */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Memory-operand variant also records FPUDP/FPUDS. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7340
7341
/** Opcode 0xd8 !11/3.  FCOMP ST(0),m32real.
 *
 * Same as iemOp_fcom_m32r but pops the register stack after updating the FSW
 * (both the normal and the underflow paths pop).
 */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7374
7375
/** Opcode 0xd8 !11/4.  FSUB ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5.  FSUBR ST(0),m32real: reversed-operand subtract. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6.  FDIV ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7.  FDIVR ST(0),m32real: reversed-operand divide. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7406
7407
/**
 * @opcode 0xd8
 *
 * First FPU escape byte.  Register form (mod == 3) operates on ST(0) and
 * ST(rm); memory form operates on ST(0) and a 32-bit real operand.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): modrm byte + low 3 bits of the escape byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7447
7448
/** Opcode 0xd9 /0 mem32real
 *
 * FLD m32real: converts the 32-bit real memory operand to 80-bit and pushes
 * it onto the FPU stack.  ST(7) must be empty for the push, otherwise a stack
 * push overflow is signalled.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* Register that becomes the new top after the push. */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7481
7482
/** Opcode 0xd9 !11/2 mem32real
 *
 * FST m32real: converts ST(0) to 32-bit real and stores it to memory.  If
 * ST(0) is empty, a negative QNaN is written instead when the invalid-op
 * exception is masked (FCW.IM), and a stack underflow is signalled.
 */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; committed conditionally below. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7517
7518
/** Opcode 0xd9 !11/3
 *
 * FSTP m32real: same as iemOp_fst_m32r but pops the register stack after the
 * store (both the normal and the underflow paths pop).
 */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7553
7554
/** Opcode 0xd9 !11/4
 *
 * FLDENV m14/28byte: loads the FPU environment (control/status/tag words,
 * instruction and data pointers) from memory; heavy lifting is deferred to
 * the iemCImpl_fldenv C implementation.  The image size depends on the
 * effective operand size, hence enmEffOpSize is passed along.
 */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7572
7573
7574/** Opcode 0xd9 !11/5 */
7575FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7576{
7577 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7578 IEM_MC_BEGIN(1, 1);
7579 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7580 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7581 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7583 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7584 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7585 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7586 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7587 IEM_MC_END();
7588 return VINF_SUCCESS;
7589}
7590
7591
/** Opcode 0xd9 !11/6
 *
 * FNSTENV m14/28byte: stores the FPU environment to memory via the
 * iemCImpl_fnstenv C implementation.
 * NOTE(review): the stats identifier says "fstenv" while the decoder entry is
 * the no-wait form FNSTENV — confirm whether the stats name is intentional.
 */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7609
7610
/** Opcode 0xd9 !11/7
 *
 * FNSTCW m2byte: stores the current FPU control word to memory.  No C
 * implementation needed — a simple fetch-and-store suffices.
 */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7628
7629
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 *
 * FNOP: does nothing except update the FPU opcode/instruction pointer (and
 * raise the usual FPU availability/pending-exception faults).
 */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7647
7648
/** Opcode 0xd9 11/0 stN
 *
 * FLD ST(i): pushes a copy of ST(i) onto the FPU stack.  An empty source
 * register signals a stack push underflow.
 */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7676
7677
/** Opcode 0xd9 11/3 stN
 *
 * FXCH ST(i): exchanges ST(0) and ST(i).  On success C1 is set (see
 * X86_FSW_C1 below); if either register is empty the underflow handling is
 * delegated to iemCImpl_fxch_underflow.
 */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        /* Swap: old ST(0) goes to ST(rm), old ST(rm) (with C1 set) goes to ST(0). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7708
7709
/** Opcode 0xd9 11/4, 0xdd 11/2.
 *
 * FSTP ST(i): copies ST(0) into ST(i) and pops.  The iDstReg == 0 case is
 * special-cased because "fstp st0,st0" is a widely used 'ffreep st0' idiom —
 * there it just pops without copying.
 */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw); /* Just pop, no store needed. */
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7756
7757
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Calls the assembly helper on ST(0) and stores the result back into ST(0);
 * an empty ST(0) raises a stack underflow on register 0.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7787
7788
/** Opcode 0xd9 0xe0.  FCHS: change the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1.  FABS: absolute value of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7803
7804
/** Opcode 0xd9 0xe4.
 *
 * FTST: compares ST(0) against 0.0, updating only the FSW condition flags.
 */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7830
7831
/** Opcode 0xd9 0xe5.
 *
 * FXAM: classifies the value in ST(0) into the FSW condition flags.  Note
 * that unlike the other ST(0) workers there is no empty-register check here —
 * the register is referenced unconditionally (IEM_MC_REF_FPUREG), since FXAM
 * must also classify an empty register.
 */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7854
7855
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * ST(7) must be empty (it becomes the new top after the push); otherwise a
 * stack push overflow is signalled.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7883
7884
/** Opcode 0xd9 0xe8.  FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9.  FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea.  FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb.  FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec.  FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed.  FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee.  FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
7937
7938
/** Opcode 0xd9 0xf0.
 *
 * F2XM1: computes 2^ST(0) - 1, replacing ST(0).
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs).  In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7952
7953
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Note the operand order: ST(rm) is the first (destination) operand here,
 * ST(0) the second — the reverse of iemOpHlpFpu_st0_stN.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7986
7987
/** Opcode 0xd9 0xf1.
 *
 * FYL2X: ST(1) := ST(1) * log2(ST(0)), then pop.  No modrm operand — the
 * constant 1 passed as "bRm" hard-wires the destination to ST(1).
 */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7994
7995
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * Used by FPTAN, FXTRACT and FSINCOS.  An empty ST(0) raises the two-result
 * push-underflow variant.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8025
8026
/** Opcode 0xd9 0xf2.  FPTAN: partial tangent of ST(0), pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3.  FPATAN: result stored in ST(1), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4.  FXTRACT: splits ST(0) into exponent and significand (pushes one). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5.  FPREM1: IEEE partial remainder of ST(0)/ST(1), result in ST(0). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
8057
8058
/** Opcode 0xd9 0xf6.
 *
 * FDECSTP: decrements the FPU stack top pointer (TOP) without touching the
 * register contents or tags.
 */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8081
8082
/** Opcode 0xd9 0xf7.
 *
 * FINCSTP: increments the FPU stack top pointer (TOP) without touching the
 * register contents or tags.
 */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8105
8106
/** Opcode 0xd9 0xf8.  FPREM: partial remainder of ST(0)/ST(1), result in ST(0). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9.  FYL2XP1: result in ST(1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa.  FSQRT: square root of ST(0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb.  FSINCOS: replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc.  FRNDINT: rounds ST(0) to integer per current FCW rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd.  FSCALE: scales ST(0) by ST(1), result in ST(0). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe.  FSIN: sine of ST(0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff.  FCOS: cosine of ST(0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
8169
8170
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 modrm bytes 0xe0 thru 0xff; indexed by bRm - 0xe0. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
8207
8208
8209/**
8210 * @opcode 0xd9
8211 */
8212FNIEMOP_DEF(iemOp_EscF1)
8213{
8214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8215 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
8216
8217 if (IEM_IS_MODRM_REG_MODE(bRm))
8218 {
8219 switch (IEM_GET_MODRM_REG_8(bRm))
8220 {
8221 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
8222 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
8223 case 2:
8224 if (bRm == 0xd0)
8225 return FNIEMOP_CALL(iemOp_fnop);
8226 return IEMOP_RAISE_INVALID_OPCODE();
8227 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
8228 case 4:
8229 case 5:
8230 case 6:
8231 case 7:
8232 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
8233 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
8234 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8235 }
8236 }
8237 else
8238 {
8239 switch (IEM_GET_MODRM_REG_8(bRm))
8240 {
8241 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
8242 case 1: return IEMOP_RAISE_INVALID_OPCODE();
8243 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
8244 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
8245 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
8246 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
8247 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
8248 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
8249 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8250 }
8251 }
8252}
8253
8254
/** Opcode 0xda 11/0. FCMOVB - copy ST(n) to ST(0) if CF is set; on an empty
 *  source or destination register, raise stack underflow instead. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(n) (source) and ST(0) (destination) must be non-empty. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8281
8282
/** Opcode 0xda 11/1. FCMOVE - copy ST(n) to ST(0) if ZF is set; empty
 *  registers raise stack underflow. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8309
8310
/** Opcode 0xda 11/2. FCMOVBE - copy ST(n) to ST(0) if CF or ZF is set; empty
 *  registers raise stack underflow. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8337
8338
/** Opcode 0xda 11/3. FCMOVU - copy ST(n) to ST(0) if PF is set (i.e. the last
 *  compare was unordered); empty registers raise stack underflow. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8365
8366
8367/**
8368 * Common worker for FPU instructions working on ST0 and STn, only affecting
8369 * flags, and popping twice when done.
8370 *
8371 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8372 */
8373FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8374{
8375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8376
8377 IEM_MC_BEGIN(3, 1);
8378 IEM_MC_LOCAL(uint16_t, u16Fsw);
8379 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8380 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8381 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8382
8383 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8384 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8385
8386 IEM_MC_PREPARE_FPU_USAGE();
8387 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
8388 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8389 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
8390 IEM_MC_ELSE()
8391 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
8392 IEM_MC_ENDIF();
8393 IEM_MC_ADVANCE_RIP();
8394
8395 IEM_MC_END();
8396 return VINF_SUCCESS;
8397}
8398
8399
/** Opcode 0xda 0xe9. FUCOMPP - unordered compare of ST(0) with ST(1), then
 *  pop both; only FSW is affected. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8406
8407
8408/**
8409 * Common worker for FPU instructions working on ST0 and an m32i, and storing
8410 * the result in ST0.
8411 *
8412 * @param bRm Mod R/M byte.
8413 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8414 */
8415FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
8416{
8417 IEM_MC_BEGIN(3, 3);
8418 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8419 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8420 IEM_MC_LOCAL(int32_t, i32Val2);
8421 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8422 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8423 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8424
8425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8427
8428 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8429 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8430 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8431
8432 IEM_MC_PREPARE_FPU_USAGE();
8433 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
8434 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
8435 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8436 IEM_MC_ELSE()
8437 IEM_MC_FPU_STACK_UNDERFLOW(0);
8438 IEM_MC_ENDIF();
8439 IEM_MC_ADVANCE_RIP();
8440
8441 IEM_MC_END();
8442 return VINF_SUCCESS;
8443}
8444
8445
/** Opcode 0xda !11/0. FIADD - add a 32-bit integer memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
8452
8453
/** Opcode 0xda !11/1. FIMUL - multiply ST(0) by a 32-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8460
8461
/** Opcode 0xda !11/2. FICOM - compare ST(0) with a 32-bit integer memory
 *  operand; only FSW is updated, nothing is stored or popped. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* FDP/FDS get the memory operand location recorded along with the FSW. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8494
8495
/** Opcode 0xda !11/3. FICOMP - same as FICOM m32i (shares the ficom assembly
 *  helper) but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* THEN_POP variant: the compare result is committed and ST(0) popped. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8528
8529
/** Opcode 0xda !11/4. FISUB - subtract a 32-bit integer memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
8536
8537
/** Opcode 0xda !11/5. FISUBR - reverse subtract: ST(0) = m32i - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
8544
8545
/** Opcode 0xda !11/6. FIDIV - divide ST(0) by a 32-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
8552
8553
/** Opcode 0xda !11/7. FIDIVR - reverse divide: ST(0) = m32i / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8560
8561
8562/**
8563 * @opcode 0xda
8564 */
8565FNIEMOP_DEF(iemOp_EscF2)
8566{
8567 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8568 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
8569 if (IEM_IS_MODRM_REG_MODE(bRm))
8570 {
8571 switch (IEM_GET_MODRM_REG_8(bRm))
8572 {
8573 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
8574 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
8575 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
8576 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
8577 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8578 case 5:
8579 if (bRm == 0xe9)
8580 return FNIEMOP_CALL(iemOp_fucompp);
8581 return IEMOP_RAISE_INVALID_OPCODE();
8582 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8583 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8584 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8585 }
8586 }
8587 else
8588 {
8589 switch (IEM_GET_MODRM_REG_8(bRm))
8590 {
8591 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
8592 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
8593 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
8594 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
8595 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
8596 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
8597 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
8598 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
8599 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8600 }
8601 }
8602}
8603
8604
/** Opcode 0xdb !11/0. FILD - load a 32-bit integer from memory, convert to
 *  80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target (currently ST(7)) must be free, else stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8636
8637
/** Opcode 0xdb !11/1. FISTTP - store ST(0) to memory as a 32-bit integer
 *  using truncation (chop) regardless of FCW.RC, then pop (SSE3 instruction). */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* The destination is mapped writable up front so memory faults happen
       before any FPU state is changed. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8672
8673
/** Opcode 0xdb !11/2. FIST - store ST(0) to memory as a 32-bit integer
 *  (rounded per FCW.RC); no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8708
8709
/** Opcode 0xdb !11/3. FISTP - same as FIST m32i (shares the fist assembly
 *  helper) but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8744
8745
/** Opcode 0xdb !11/5. FLD m80real - load an 80-bit real from memory and push
 *  it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target (currently ST(7)) must be free, else stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8777
8778
/** Opcode 0xdb !11/7. FSTP m80real - store ST(0) to memory as an 80-bit real
 *  and pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* NOTE(review): 7 appears to be an alignment specification for the 10-byte
       access (see IEM_MC_MEM_MAP_EX docs) - confirm against the MC definition. */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the real-indefinite QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8813
8814
/** Opcode 0xdb 11/0. FCMOVNB - copy ST(n) to ST(0) if CF is clear; empty
 *  registers raise stack underflow. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8841
8842
/** Opcode 0xdb 11/1. FCMOVNE - copy ST(n) to ST(0) if ZF is clear; empty
 *  registers raise stack underflow. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8869
8870
/** Opcode 0xdb 11/2. FCMOVNBE - copy ST(n) to ST(0) if both CF and ZF are
 *  clear; empty registers raise stack underflow. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8897
8898
/** Opcode 0xdb 11/3. FCMOVNU - copy ST(n) to ST(0) if PF is clear (not
 *  unordered); empty registers raise stack underflow.  (Function name uses
 *  "nnu" but this is the SDM's FCMOVNU.) */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8925
8926
/** Opcode 0xdb 0xe0. FNENI - 8087 interrupt-enable instruction; emulated as a
 *  no-op (only the FPU-available check and RIP advance are performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8938
8939
/** Opcode 0xdb 0xe1. FNDISI - 8087 interrupt-disable instruction; emulated as
 *  a no-op (only the FPU-available check and RIP advance are performed). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8951
8952
/** Opcode 0xdb 0xe2. FNCLEX - clear the FSW exception flags (no-wait form,
 *  so no pending-exception check before executing). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* FPU state must be current since we modify FSW directly. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8967
8968
/** Opcode 0xdb 0xe3. FNINIT - reinitialize the FPU; defers to the C
 *  implementation with fCheckXcpts=false (the no-wait form). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8976
8977
/** Opcode 0xdb 0xe4. FNSETPM - 80287 protected-mode switch; emulated as a
 *  no-op on later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8989
8990
/** Opcode 0xdb 0xe5. FRSTPM - 80287XL return-to-real-mode instruction; newer
 *  CPUs raise \#UD, which is the behavior implemented here (the no-op variant
 *  is kept disabled below for reference). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
9006
9007
/** Opcode 0xdb 11/5. FUCOMI - unordered compare of ST(0) with ST(n), setting
 *  EFLAGS; no pop (shared C worker, fPop=false). */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
9014
9015
/** Opcode 0xdb 11/6. FCOMI - ordered compare of ST(0) with ST(n), setting
 *  EFLAGS; no pop (shared C worker, fPop=false). */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
9022
9023
9024/**
9025 * @opcode 0xdb
9026 */
9027FNIEMOP_DEF(iemOp_EscF3)
9028{
9029 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9030 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
9031 if (IEM_IS_MODRM_REG_MODE(bRm))
9032 {
9033 switch (IEM_GET_MODRM_REG_8(bRm))
9034 {
9035 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
9036 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
9037 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
9038 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
9039 case 4:
9040 switch (bRm)
9041 {
9042 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
9043 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
9044 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
9045 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
9046 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
9047 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
9048 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
9049 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
9050 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9051 }
9052 break;
9053 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
9054 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
9055 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9056 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9057 }
9058 }
9059 else
9060 {
9061 switch (IEM_GET_MODRM_REG_8(bRm))
9062 {
9063 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
9064 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
9065 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
9066 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
9067 case 4: return IEMOP_RAISE_INVALID_OPCODE();
9068 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
9069 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9070 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
9071 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9072 }
9073 }
9074}
9075
9076
9077/**
9078 * Common worker for FPU instructions working on STn and ST0, and storing the
9079 * result in STn unless IE, DE or ZE was raised.
9080 *
9081 * @param bRm Mod R/M byte.
9082 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9083 */
9084FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9085{
9086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9087
9088 IEM_MC_BEGIN(3, 1);
9089 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9090 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9091 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9092 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9093
9094 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9095 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9096
9097 IEM_MC_PREPARE_FPU_USAGE();
9098 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0)
9099 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9100 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
9101 IEM_MC_ELSE()
9102 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
9103 IEM_MC_ENDIF();
9104 IEM_MC_ADVANCE_RIP();
9105
9106 IEM_MC_END();
9107 return VINF_SUCCESS;
9108}
9109
9110
/** Opcode 0xdc 11/0. FADD ST(n),ST(0) - result stored in ST(n). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
9117
9118
/** Opcode 0xdc 11/1. FMUL ST(n),ST(0) - result stored in ST(n). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
9125
9126
/** Opcode 0xdc 11/4. FSUBR ST(n),ST(0) - note the dc-group operand reversal
 *  relative to the d8 forms (see the SDM's FSUB/FSUBR encoding table). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
9133
9134
/** Opcode 0xdc 11/5. FSUB ST(n),ST(0) - result stored in ST(n). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
9141
9142
/** Opcode 0xdc 11/6. FDIVR ST(n),ST(0) - result stored in ST(n). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
9149
9150
/** Opcode 0xdc 11/7. FDIV ST(n),ST(0) - result stored in ST(n). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
9157
9158
9159/**
9160 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
9161 * memory operand, and storing the result in ST0.
9162 *
9163 * @param bRm Mod R/M byte.
9164 * @param pfnImpl Pointer to the instruction implementation (assembly).
9165 */
9166FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
9167{
9168 IEM_MC_BEGIN(3, 3);
9169 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9170 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9171 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
9172 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9173 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
9174 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
9175
9176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9178 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9179 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9180
9181 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9182 IEM_MC_PREPARE_FPU_USAGE();
9183 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
9184 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
9185 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9186 IEM_MC_ELSE()
9187 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9188 IEM_MC_ENDIF();
9189 IEM_MC_ADVANCE_RIP();
9190
9191 IEM_MC_END();
9192 return VINF_SUCCESS;
9193}
9194
9195
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    /* Delegate to the shared ST(0) <- ST(0) op m64r worker with the add helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
9202
9203
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    /* Delegate to the shared ST(0) <- ST(0) op m64r worker with the multiply helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
9210
9211
/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    /* Compare ST0 with a 64-bit memory operand; only FSW is updated, no result store. */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST0 empty: stack underflow (UINT8_MAX = no destination register). */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9244
9245
/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    /* Same as FCOM m64r above, but pops the register stack afterwards
       (..._THEN_POP variants of the FSW/underflow macros). */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9278
9279
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    /* Delegate to the shared ST(0) <- ST(0) op m64r worker with the subtract helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
9286
9287
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    /* Delegate to the shared ST(0) <- ST(0) op m64r worker with the reverse-subtract helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
9294
9295
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    /* Delegate to the shared ST(0) <- ST(0) op m64r worker with the divide helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
9302
9303
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    /* Delegate to the shared ST(0) <- ST(0) op m64r worker with the reverse-divide helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
9310
9311
/**
 * @opcode 0xdc
 *
 * FPU escape byte 0xdc: dispatches on the ModR/M byte to either the
 * register form (ST(i) <- ST(i) op ST(0)) or the memory form
 * (ST(0) <- ST(0) op m64r).
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (FOP) for FNSAVE/FNSTENV & friends. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9350
9351
/** Opcode 0xdd !11/0.
 * Loads a 64-bit floating point value from memory and pushes it onto the
 * FPU register stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the register that will become ST(0) after the push; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* No free slot: record FPU stack (push) overflow. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9383
9384
/** Opcode 0xdd !11/1.
 * Stores ST0 to memory as a 64-bit integer using truncation, then pops. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so access faults precede FPU work. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9419
9420
/** Opcode 0xdd !11/2.
 * Stores ST0 to memory as a 64-bit floating point value (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so access faults precede FPU work. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: with #IA masked, store the negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9455
9456
9457
9458
/** Opcode 0xdd !11/3.
 * Stores ST0 to memory as a 64-bit floating point value, then pops. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so access faults precede FPU work. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: with #IA masked, store the negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9493
9494
/** Opcode 0xdd !11/4.
 * Restores the full FPU state from memory; the heavy lifting is deferred to
 * the C implementation (iemCImpl_frstor). */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9512
9513
/** Opcode 0xdd !11/6.
 * Saves the full FPU state to memory; the heavy lifting is deferred to the C
 * implementation (iemCImpl_fnsave). */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
9532
/** Opcode 0xdd !11/7.
 * Stores the FPU status word to a 16-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    /* Read-only access to the FPU state; just fetch FSW and write it out. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
9557
9558
/** Opcode 0xdd 11/0.
 * Marks ST(i) as empty (frees the register) without touching TOP. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9580
9581
/** Opcode 0xdd 11/2.
 * Copies ST(0) into ST(i). */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap ST0 in a result (FSW=0) and store it into the ModR/M register. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9606
9607
/** Opcode 0xdd 11/4. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    /* Delegate to the shared compare-without-store worker (updates FSW only). */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
9614
9615
/** Opcode 0xdd 11/5. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    /* Delegate to the compare-without-store worker variant that pops afterwards. */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9622
9623
/**
 * @opcode 0xdd
 *
 * FPU escape byte 0xdd: register forms (FFREE/FST/FSTP/FUCOM(P)) and memory
 * forms (FLD/FISTTP/FST(P) m64, FRSTOR, FNSAVE, FNSTSW m16).
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (FOP) for FNSAVE/FNSTENV & friends. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9662
9663
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    /* Delegate to the ST(i),ST(0)-with-pop worker with the add helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
9670
9671
/** Opcode 0xde 11/1. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    /* Delegate to the ST(i),ST(0)-with-pop worker with the multiply helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
9678
9679
/** Opcode 0xde 0xd9. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    /* Compare-without-store worker that pops twice; no ModR/M register
       selection needed as the operands are implied (hence FNIEMOP_CALL_1). */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
9686
9687
/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    /* Delegate to the ST(i),ST(0)-with-pop worker with the reverse-subtract helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
9694
9695
/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    /* Delegate to the ST(i),ST(0)-with-pop worker with the subtract helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
9702
9703
/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    /* Delegate to the ST(i),ST(0)-with-pop worker with the reverse-divide helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
9710
9711
/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    /* Delegate to the ST(i),ST(0)-with-pop worker with the divide helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9718
9719
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * Raises \#NM / FPU exceptions as needed; on an empty ST0 the stack underflow
 * path is taken instead of calling the implementation.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    /* Calculate the effective address first, then finish decoding (no LOCK prefix allowed). */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        /* ST0 is valid: perform the operation and store the result back into ST0. */
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9756
9757
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    /* Delegate to the shared ST(0) <- ST(0) op m16i worker with the integer-add helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
9764
9765
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    /* Delegate to the shared ST(0) <- ST(0) op m16i worker with the integer-multiply helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9772
9773
/** Opcode 0xde !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    /* Compare ST0 with a 16-bit integer memory operand; only FSW is updated. */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9806
9807
/** Opcode 0xde !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    /* Same as FICOM m16i above, but pops the register stack afterwards
       (..._THEN_POP variants of the FSW/underflow macros). */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9840
9841
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    /* Delegate to the shared ST(0) <- ST(0) op m16i worker with the integer-subtract helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
9848
9849
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    /* Delegate to the shared ST(0) <- ST(0) op m16i worker with the reverse integer-subtract helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
9856
9857
/** Opcode 0xde !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    /* Delegate to the shared ST(0) <- ST(0) op m16i worker with the integer-divide helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
9864
9865
/** Opcode 0xde !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    /* Delegate to the shared ST(0) <- ST(0) op m16i worker with the reverse integer-divide helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9872
9873
/**
 * @opcode 0xde
 *
 * FPU escape byte 0xde: register forms (pop variants of the arithmetic
 * instructions plus FCOMPP) and memory forms (16-bit integer operands).
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (FOP) for FNSAVE/FNSTENV & friends. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            /* Only the 0xd9 encoding (FCOMPP) is valid in group 3; the rest are invalid. */
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9914
9915
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Free ST(i), then increment TOP (the fincstp part of the assumed semantics). */
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9937
9938
/** Opcode 0xdf 0xe0.
 * Stores the FPU status word in AX. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* Read-only access to the FPU state; fetch FSW into AX. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9955
9956
/** Opcode 0xdf 11/5. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    /* NOTE(review): this passes the ordered-compare helper
       iemAImpl_fcomi_r80_by_r80, identical to FCOMIP below.  Architecturally
       FUCOMIP differs from FCOMIP only in QNaN handling (unordered compare
       does not raise #IA on QNaN operands) -- verify whether a dedicated
       fucomi helper should be used here instead. */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9963
9964
/** Opcode 0xdf 11/6. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    /* Compare ST0 with ST(i) setting EFLAGS, then pop; deferred to the C impl. */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9971
9972
/** Opcode 0xdf !11/0.
 * Loads a 16-bit signed integer from memory and pushes it onto the FPU
 * register stack (converted to 80-bit real). */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the register that will become ST(0) after the push; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10004
10005
/** Opcode 0xdf !11/1.
 * Stores ST0 to memory as a 16-bit integer using truncation, then pops. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so access faults precede FPU work. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10040
10041
/** Opcode 0xdf !11/2.
 * Stores ST0 to memory as a 16-bit integer (rounded per FCW RC, no pop). */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so access faults precede FPU work. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10076
10077
/** Opcode 0xdf !11/3.
 * Stores ST0 to memory as a 16-bit integer (rounded per FCW RC), then pops. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so access faults precede FPU work. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10112
10113
10114/** Opcode 0xdf !11/4. FBLD m80bcd: load an 80-bit packed BCD integer from memory and push it onto the FPU stack. */
10115FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
10116{
10117 IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");
10118
10119 IEM_MC_BEGIN(2, 3);
10120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10121 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10122 IEM_MC_LOCAL(RTPBCD80U, d80Val);
10123 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10124 IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);
10125
10126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10128
10129 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10130 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10131 IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10132
10133 IEM_MC_PREPARE_FPU_USAGE();
10134 IEM_MC_IF_FPUREG_IS_EMPTY(7) /* A push needs the register at ST(7) to be free. */
10135 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
10136 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10137 IEM_MC_ELSE() /* No free register: stack overflow. */
10138 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10139 IEM_MC_ENDIF();
10140 IEM_MC_ADVANCE_RIP();
10141
10142 IEM_MC_END();
10143 return VINF_SUCCESS;
10144}
10145
10146
10147/** Opcode 0xdf !11/5. FILD m64i: load a signed 64-bit integer from memory and push it onto the FPU stack. */
10148FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
10149{
10150 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
10151
10152 IEM_MC_BEGIN(2, 3);
10153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10154 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10155 IEM_MC_LOCAL(int64_t, i64Val);
10156 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10157 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
10158
10159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10161
10162 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10163 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10164 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10165
10166 IEM_MC_PREPARE_FPU_USAGE();
10167 IEM_MC_IF_FPUREG_IS_EMPTY(7) /* A push needs the register at ST(7) to be free. */
10168 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
10169 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10170 IEM_MC_ELSE() /* No free register: stack overflow. */
10171 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10172 IEM_MC_ENDIF();
10173 IEM_MC_ADVANCE_RIP();
10174
10175 IEM_MC_END();
10176 return VINF_SUCCESS;
10177}
10178
10179
10180/** Opcode 0xdf !11/6. FBSTP m80bcd: store ST(0) to memory as 80-bit packed BCD, then pop the FPU stack. */
10181FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
10182{
10183 IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
10184 IEM_MC_BEGIN(3, 2);
10185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10186 IEM_MC_LOCAL(uint16_t, u16Fsw);
10187 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10188 IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
10189 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10190
10191 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10193 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10194 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10195
10196 IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
10197 IEM_MC_PREPARE_FPU_USAGE();
10198 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
10199 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
10200 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
10201 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10202 IEM_MC_ELSE() /* ST(0) is empty: stack underflow. */
10203 IEM_MC_IF_FCW_IM() /* Masked invalid-operation: store the BCD indefinite value. */
10204 IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
10205 IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
10206 IEM_MC_ENDIF();
10207 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10208 IEM_MC_ENDIF();
10209 IEM_MC_ADVANCE_RIP();
10210
10211 IEM_MC_END();
10212 return VINF_SUCCESS;
10213}
10214
10215
10216/** Opcode 0xdf !11/7. FISTP m64i: store ST(0) to memory as int64, then pop the FPU stack. */
10217FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
10218{
10219 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
10220 IEM_MC_BEGIN(3, 2);
10221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10222 IEM_MC_LOCAL(uint16_t, u16Fsw);
10223 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10224 IEM_MC_ARG(int64_t *, pi64Dst, 1);
10225 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10226
10227 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10229 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10230 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10231
10232 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
10233 IEM_MC_PREPARE_FPU_USAGE();
10234 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
10235 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
10236 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
10237 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10238 IEM_MC_ELSE() /* ST(0) is empty: stack underflow. */
10239 IEM_MC_IF_FCW_IM() /* Masked invalid-operation: store the integer indefinite value. */
10240 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
10241 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
10242 IEM_MC_ENDIF();
10243 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10244 IEM_MC_ENDIF();
10245 IEM_MC_ADVANCE_RIP();
10246
10247 IEM_MC_END();
10248 return VINF_SUCCESS;
10249}
10250
10251
10252/**
10253 * @opcode 0xdf
10254 */
10255FNIEMOP_DEF(iemOp_EscF7) /* FPU escape byte 0xdf: dispatch on ModRM - register forms vs memory forms. */
10256{
10257 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10258 if (IEM_IS_MODRM_REG_MODE(bRm))
10259 {
10260 switch (IEM_GET_MODRM_REG_8(bRm)) /* Dispatch on the reg field (/0../7). */
10261 {
10262 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
10263 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
10264 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10265 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10266 case 4: if (bRm == 0xe0) /* Only the 0xdf 0xe0 encoding is valid here. */
10267 return FNIEMOP_CALL(iemOp_fnstsw_ax);
10268 return IEMOP_RAISE_INVALID_OPCODE();
10269 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
10270 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
10271 case 7: return IEMOP_RAISE_INVALID_OPCODE();
10272 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10273 }
10274 }
10275 else
10276 { /* Memory operand forms. */
10277 switch (IEM_GET_MODRM_REG_8(bRm))
10278 {
10279 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
10280 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
10281 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
10282 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
10283 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
10284 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
10285 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
10286 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
10287 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10288 }
10289 }
10290}
10291
10292
10293/**
10294 * @opcode 0xe0
10295 */
10296FNIEMOP_DEF(iemOp_loopne_Jb) /* LOOPNE/LOOPNZ Jb: decrement rCX; branch while rCX != 0 and ZF is clear. */
10297{
10298 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
10299 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10301 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10302
10303 switch (pVCpu->iem.s.enmEffAddrMode) /* The counter width (CX/ECX/RCX) follows the effective address size. */
10304 {
10305 case IEMMODE_16BIT:
10306 IEM_MC_BEGIN(0,0);
10307 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10308 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10309 IEM_MC_REL_JMP_S8(i8Imm);
10310 } IEM_MC_ELSE() {
10311 IEM_MC_ADVANCE_RIP();
10312 } IEM_MC_ENDIF();
10313 IEM_MC_END();
10314 return VINF_SUCCESS;
10315
10316 case IEMMODE_32BIT:
10317 IEM_MC_BEGIN(0,0);
10318 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10319 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10320 IEM_MC_REL_JMP_S8(i8Imm);
10321 } IEM_MC_ELSE() {
10322 IEM_MC_ADVANCE_RIP();
10323 } IEM_MC_ENDIF();
10324 IEM_MC_END();
10325 return VINF_SUCCESS;
10326
10327 case IEMMODE_64BIT:
10328 IEM_MC_BEGIN(0,0);
10329 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10330 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10331 IEM_MC_REL_JMP_S8(i8Imm);
10332 } IEM_MC_ELSE() {
10333 IEM_MC_ADVANCE_RIP();
10334 } IEM_MC_ENDIF();
10335 IEM_MC_END();
10336 return VINF_SUCCESS;
10337
10338 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10339 }
10340}
10341
10342
10343/**
10344 * @opcode 0xe1
10345 */
10346FNIEMOP_DEF(iemOp_loope_Jb) /* LOOPE/LOOPZ Jb: decrement rCX; branch while rCX != 0 and ZF is set. */
10347{
10348 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
10349 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10351 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10352
10353 switch (pVCpu->iem.s.enmEffAddrMode) /* The counter width (CX/ECX/RCX) follows the effective address size. */
10354 {
10355 case IEMMODE_16BIT:
10356 IEM_MC_BEGIN(0,0);
10357 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10358 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10359 IEM_MC_REL_JMP_S8(i8Imm);
10360 } IEM_MC_ELSE() {
10361 IEM_MC_ADVANCE_RIP();
10362 } IEM_MC_ENDIF();
10363 IEM_MC_END();
10364 return VINF_SUCCESS;
10365
10366 case IEMMODE_32BIT:
10367 IEM_MC_BEGIN(0,0);
10368 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10369 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10370 IEM_MC_REL_JMP_S8(i8Imm);
10371 } IEM_MC_ELSE() {
10372 IEM_MC_ADVANCE_RIP();
10373 } IEM_MC_ENDIF();
10374 IEM_MC_END();
10375 return VINF_SUCCESS;
10376
10377 case IEMMODE_64BIT:
10378 IEM_MC_BEGIN(0,0);
10379 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10380 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10381 IEM_MC_REL_JMP_S8(i8Imm);
10382 } IEM_MC_ELSE() {
10383 IEM_MC_ADVANCE_RIP();
10384 } IEM_MC_ENDIF();
10385 IEM_MC_END();
10386 return VINF_SUCCESS;
10387
10388 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10389 }
10390}
10391
10392
10393/**
10394 * @opcode 0xe2
10395 */
10396FNIEMOP_DEF(iemOp_loop_Jb) /* LOOP Jb: decrement rCX and branch while rCX != 0; EFLAGS are not consulted. */
10397{
10398 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
10399 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10401 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10402
10403 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
10404 * using the 32-bit operand size override. How can that be restarted? See
10405 * weird pseudo code in intel manual. */
10406
10407 /** NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
10408 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
10409 * the loop causes guest crashes, but when logging it's nice to skip a few million
10410 * lines of useless output. */
10411#if defined(LOG_ENABLED)
10412 if ((LogIs3Enabled() || LogIs4Enabled()) && (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)) /* Detects LOOP $-2 (self-branch). */
10413 switch (pVCpu->iem.s.enmEffAddrMode)
10414 {
10415 case IEMMODE_16BIT: /* Skip the spin by zeroing the counter in one go. */
10416 IEM_MC_BEGIN(0,0);
10417 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
10418 IEM_MC_ADVANCE_RIP();
10419 IEM_MC_END();
10420 return VINF_SUCCESS;
10421
10422 case IEMMODE_32BIT:
10423 IEM_MC_BEGIN(0,0);
10424 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
10425 IEM_MC_ADVANCE_RIP();
10426 IEM_MC_END();
10427 return VINF_SUCCESS;
10428
10429 case IEMMODE_64BIT:
10430 IEM_MC_BEGIN(0,0);
10431 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
10432 IEM_MC_ADVANCE_RIP();
10433 IEM_MC_END();
10434 return VINF_SUCCESS;
10435
10436 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10437 }
10438#endif
10439
10440 switch (pVCpu->iem.s.enmEffAddrMode) /* The counter width (CX/ECX/RCX) follows the effective address size. */
10441 {
10442 case IEMMODE_16BIT:
10443 IEM_MC_BEGIN(0,0);
10444
10445 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10446 IEM_MC_IF_CX_IS_NZ() {
10447 IEM_MC_REL_JMP_S8(i8Imm);
10448 } IEM_MC_ELSE() {
10449 IEM_MC_ADVANCE_RIP();
10450 } IEM_MC_ENDIF();
10451 IEM_MC_END();
10452 return VINF_SUCCESS;
10453
10454 case IEMMODE_32BIT:
10455 IEM_MC_BEGIN(0,0);
10456 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10457 IEM_MC_IF_ECX_IS_NZ() {
10458 IEM_MC_REL_JMP_S8(i8Imm);
10459 } IEM_MC_ELSE() {
10460 IEM_MC_ADVANCE_RIP();
10461 } IEM_MC_ENDIF();
10462 IEM_MC_END();
10463 return VINF_SUCCESS;
10464
10465 case IEMMODE_64BIT:
10466 IEM_MC_BEGIN(0,0);
10467 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10468 IEM_MC_IF_RCX_IS_NZ() {
10469 IEM_MC_REL_JMP_S8(i8Imm);
10470 } IEM_MC_ELSE() {
10471 IEM_MC_ADVANCE_RIP();
10472 } IEM_MC_ENDIF();
10473 IEM_MC_END();
10474 return VINF_SUCCESS;
10475
10476 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10477 }
10478}
10479
10480
10481/**
10482 * @opcode 0xe3
10483 */
10484FNIEMOP_DEF(iemOp_jecxz_Jb) /* JCXZ/JECXZ/JRCXZ Jb: branch when rCX is zero; the counter is not modified. */
10485{
10486 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
10487 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10489 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10490
10491 switch (pVCpu->iem.s.enmEffAddrMode) /* The counter width (CX/ECX/RCX) follows the effective address size. */
10492 {
10493 case IEMMODE_16BIT:
10494 IEM_MC_BEGIN(0,0);
10495 IEM_MC_IF_CX_IS_NZ() { /* Non-zero counter: fall through. */
10496 IEM_MC_ADVANCE_RIP();
10497 } IEM_MC_ELSE() {
10498 IEM_MC_REL_JMP_S8(i8Imm);
10499 } IEM_MC_ENDIF();
10500 IEM_MC_END();
10501 return VINF_SUCCESS;
10502
10503 case IEMMODE_32BIT:
10504 IEM_MC_BEGIN(0,0);
10505 IEM_MC_IF_ECX_IS_NZ() {
10506 IEM_MC_ADVANCE_RIP();
10507 } IEM_MC_ELSE() {
10508 IEM_MC_REL_JMP_S8(i8Imm);
10509 } IEM_MC_ENDIF();
10510 IEM_MC_END();
10511 return VINF_SUCCESS;
10512
10513 case IEMMODE_64BIT:
10514 IEM_MC_BEGIN(0,0);
10515 IEM_MC_IF_RCX_IS_NZ() {
10516 IEM_MC_ADVANCE_RIP();
10517 } IEM_MC_ELSE() {
10518 IEM_MC_REL_JMP_S8(i8Imm);
10519 } IEM_MC_ENDIF();
10520 IEM_MC_END();
10521 return VINF_SUCCESS;
10522
10523 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10524 }
10525}
10526
10527
10528/** Opcode 0xe4 - IN AL,Ib: input one byte from the immediate 8-bit port into AL. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
10530{
10531 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
10532 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10534 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, 1);
10535}
10536
10537
10538/** Opcode 0xe5 - IN eAX,Ib: input word/dword from the immediate port into AX/EAX (width from effective operand size). */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
10540{
10541 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
10542 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10544 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
10545}
10546
10547
10548/** Opcode 0xe6 - OUT Ib,AL: output AL to the immediate 8-bit port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
10550{
10551 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
10552 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10554 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, 1);
10555}
10556
10557
10558/** Opcode 0xe7 - OUT Ib,eAX: output AX/EAX to the immediate port (width from effective operand size). */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
10560{
10561 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
10562 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10564 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
10565}
10566
10567
10568/**
10569 * @opcode 0xe8
10570 */
10571FNIEMOP_DEF(iemOp_call_Jv) /* CALL rel16/rel32: near relative call, deferred to the C workers. */
10572{
10573 IEMOP_MNEMONIC(call_Jv, "call Jv");
10574 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10575 switch (pVCpu->iem.s.enmEffOpSize)
10576 {
10577 case IEMMODE_16BIT:
10578 {
10579 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10580 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
10581 }
10582
10583 case IEMMODE_32BIT:
10584 {
10585 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10586 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
10587 }
10588
10589 case IEMMODE_64BIT:
10590 {
10591 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* 64-bit mode uses a sign-extended imm32 displacement. */
10592 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
10593 }
10594
10595 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10596 }
10597}
10598
10599
10600/**
10601 * @opcode 0xe9
10602 */
10603FNIEMOP_DEF(iemOp_jmp_Jv) /* JMP rel16/rel32: near relative jump. */
10604{
10605 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
10606 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10607 switch (pVCpu->iem.s.enmEffOpSize)
10608 {
10609 case IEMMODE_16BIT:
10610 {
10611 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
10612 IEM_MC_BEGIN(0, 0);
10613 IEM_MC_REL_JMP_S16(i16Imm);
10614 IEM_MC_END();
10615 return VINF_SUCCESS;
10616 }
10617
10618 case IEMMODE_64BIT: /* 64-bit mode shares the signed imm32 displacement path with 32-bit. */
10619 case IEMMODE_32BIT:
10620 {
10621 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
10622 IEM_MC_BEGIN(0, 0);
10623 IEM_MC_REL_JMP_S32(i32Imm);
10624 IEM_MC_END();
10625 return VINF_SUCCESS;
10626 }
10627
10628 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10629 }
10630}
10631
10632
10633/**
10634 * @opcode 0xea
10635 */
10636FNIEMOP_DEF(iemOp_jmp_Ap) /* JMP ptr16:16/ptr16:32: direct far jump; invalid in 64-bit mode. */
10637{
10638 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
10639 IEMOP_HLP_NO_64BIT();
10640
10641 /* Decode the far pointer address and pass it on to the far call C implementation. */
10642 uint32_t offSeg;
10643 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
10644 IEM_OPCODE_GET_NEXT_U32(&offSeg);
10645 else
10646 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
10647 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel); /* The selector follows the offset. */
10648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10649 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
10650}
10651
10652
10653/**
10654 * @opcode 0xeb
10655 */
10656FNIEMOP_DEF(iemOp_jmp_Jb) /* JMP rel8: short relative jump. */
10657{
10658 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
10659 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10661 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10662
10663 IEM_MC_BEGIN(0, 0);
10664 IEM_MC_REL_JMP_S8(i8Imm);
10665 IEM_MC_END();
10666 return VINF_SUCCESS;
10667}
10668
10669
10670/** Opcode 0xec - IN AL,DX: input one byte from the port addressed by DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
10672{
10673 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
10674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10675 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
10676}
10677
10678
10679/** Opcode 0xed - IN eAX,DX: input word/dword from the port addressed by DX into AX/EAX (width from effective operand size). */
FNIEMOP_DEF(iemOp_in_eAX_DX)
10681{
10682 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
10683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10684 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
10685}
10686
10687
10688/** Opcode 0xee - OUT DX,AL: output AL to the port addressed by DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
10690{
10691 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
10692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10693 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
10694}
10695
10696
10697/** Opcode 0xef - OUT DX,eAX: output AX/EAX to the port addressed by DX (width from effective operand size). */
FNIEMOP_DEF(iemOp_out_DX_eAX)
10699{
10700 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
10701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10702 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
10703}
10704
10705
10706/**
10707 * @opcode 0xf0
10708 */
10709FNIEMOP_DEF(iemOp_lock) /* LOCK prefix: records IEM_OP_PRF_LOCK and continues decoding the next opcode byte. */
10710{
10711 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
10712 if (!pVCpu->iem.s.fDisregardLock) /* LOCK can be configured to be ignored. */
10713 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
10714
10715 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10716 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10717}
10718
10719
10720/**
10721 * @opcode 0xf1
10722 */
10723FNIEMOP_DEF(iemOp_int1) /* INT1/ICEBP: raises \#DB via the common software interrupt worker. */
10724{
10725 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
10726 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
10727 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
10728 * LOADALL memo. Needs some testing. */
10729 IEMOP_HLP_MIN_386();
10730 /** @todo testcase! */
10731 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
10732}
10733
10734
10735/**
10736 * @opcode 0xf2
10737 */
10738FNIEMOP_DEF(iemOp_repne) /* REPNE/REPNZ (F2) prefix: records it and continues decoding the next opcode byte. */
10739{
10740 /* This overrides any previous REPE prefix. */
10741 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
10742 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
10743 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
10744
10745 /* For the 4 entry opcode tables, REPNZ overrides any previous
10746 REPZ and operand size prefixes. */
10747 pVCpu->iem.s.idxPrefix = 3; /* 3 = the REPNZ (0xf2) column. */
10748
10749 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10750 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10751}
10752
10753
10754/**
10755 * @opcode 0xf3
10756 */
10757FNIEMOP_DEF(iemOp_repe) /* REPE/REPZ (F3) prefix: records it and continues decoding the next opcode byte. */
10758{
10759 /* This overrides any previous REPNE prefix. */
10760 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
10761 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
10762 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
10763
10764 /* For the 4 entry opcode tables, REPZ overrides any previous
10765 REPNZ and operand size prefixes. */
10766 pVCpu->iem.s.idxPrefix = 2; /* 2 = the REPZ (0xf3) column. */
10767
10768 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10769 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10770}
10771
10772
10773/**
10774 * @opcode 0xf4
10775 */
10776FNIEMOP_DEF(iemOp_hlt) /* HLT: deferred entirely to the C implementation (iemCImpl_hlt). */
10777{
10778 IEMOP_MNEMONIC(hlt, "hlt");
10779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10780 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
10781}
10782
10783
10784/**
10785 * @opcode 0xf5
10786 */
10787FNIEMOP_DEF(iemOp_cmc) /* CMC: complement the carry flag. */
10788{
10789 IEMOP_MNEMONIC(cmc, "cmc");
10790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10791 IEM_MC_BEGIN(0, 0);
10792 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
10793 IEM_MC_ADVANCE_RIP();
10794 IEM_MC_END();
10795 return VINF_SUCCESS;
10796}
10797
10798
10799/**
10800 * Common implementation of 'inc/dec/not/neg Eb'.
10801 *
10802 * @param bRm The RM byte.
10803 * @param pImpl The instruction implementation.
10804 */
10805FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10806{
10807 if (IEM_IS_MODRM_REG_MODE(bRm))
10808 {
10809 /* register access */
10810 IEM_MC_BEGIN(2, 0);
10811 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10812 IEM_MC_ARG(uint32_t *, pEFlags, 1);
10813 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10814 IEM_MC_REF_EFLAGS(pEFlags);
10815 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10816 IEM_MC_ADVANCE_RIP();
10817 IEM_MC_END();
10818 }
10819 else
10820 {
10821 /* memory access. */
10822 IEM_MC_BEGIN(2, 2);
10823 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10824 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10826
10827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10828 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10829 IEM_MC_FETCH_EFLAGS(EFlags);
10830 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10831 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10832 else /* LOCK prefix present: use the locked (atomic) worker. */
10833 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
10834
10835 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10836 IEM_MC_COMMIT_EFLAGS(EFlags);
10837 IEM_MC_ADVANCE_RIP();
10838 IEM_MC_END();
10839 }
10840 return VINF_SUCCESS;
10841}
10842
10843
10844/**
10845 * Common implementation of 'inc/dec/not/neg Ev'.
10846 *
10847 * @param bRm The RM byte.
10848 * @param pImpl The instruction implementation.
10849 */
10850FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10851{
10852 /* Registers are handled by a common worker. */
10853 if (IEM_IS_MODRM_REG_MODE(bRm))
10854 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, IEM_GET_MODRM_RM(pVCpu, bRm));
10855
10856 /* Memory we do here. */
10857 switch (pVCpu->iem.s.enmEffOpSize) /* Operand width selects the 16/32/64-bit worker. */
10858 {
10859 case IEMMODE_16BIT:
10860 IEM_MC_BEGIN(2, 2);
10861 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10862 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10864
10865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10866 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10867 IEM_MC_FETCH_EFLAGS(EFlags);
10868 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10869 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
10870 else /* LOCK prefix present: use the locked (atomic) worker. */
10871 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
10872
10873 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10874 IEM_MC_COMMIT_EFLAGS(EFlags);
10875 IEM_MC_ADVANCE_RIP();
10876 IEM_MC_END();
10877 return VINF_SUCCESS;
10878
10879 case IEMMODE_32BIT:
10880 IEM_MC_BEGIN(2, 2);
10881 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10882 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10884
10885 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10886 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10887 IEM_MC_FETCH_EFLAGS(EFlags);
10888 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10889 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
10890 else
10891 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
10892
10893 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10894 IEM_MC_COMMIT_EFLAGS(EFlags);
10895 IEM_MC_ADVANCE_RIP();
10896 IEM_MC_END();
10897 return VINF_SUCCESS;
10898
10899 case IEMMODE_64BIT:
10900 IEM_MC_BEGIN(2, 2);
10901 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10902 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10904
10905 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10906 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10907 IEM_MC_FETCH_EFLAGS(EFlags);
10908 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10909 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
10910 else
10911 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
10912
10913 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10914 IEM_MC_COMMIT_EFLAGS(EFlags);
10915 IEM_MC_ADVANCE_RIP();
10916 IEM_MC_END();
10917 return VINF_SUCCESS;
10918
10919 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10920 }
10921}
10922
10923
10924/** Opcode 0xf6 /0. TEST Eb,Ib: ANDs the operands to set EFLAGS; the destination is never written back. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
10926{
10927 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
10928 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10929
10930 if (IEM_IS_MODRM_REG_MODE(bRm))
10931 {
10932 /* register access */
10933 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10935
10936 IEM_MC_BEGIN(3, 0);
10937 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10938 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
10939 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10940 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10941 IEM_MC_REF_EFLAGS(pEFlags);
10942 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
10943 IEM_MC_ADVANCE_RIP();
10944 IEM_MC_END();
10945 }
10946 else
10947 {
10948 /* memory access. */
10949 IEM_MC_BEGIN(3, 2);
10950 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10951 IEM_MC_ARG(uint8_t, u8Src, 1);
10952 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10953 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10954
10955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of the immediate following the ModRM bytes. */
10956 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10957 IEM_MC_ASSIGN(u8Src, u8Imm);
10958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10959 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); /* Read-only mapping: TEST does not write. */
10960 IEM_MC_FETCH_EFLAGS(EFlags);
10961 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
10962
10963 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
10964 IEM_MC_COMMIT_EFLAGS(EFlags);
10965 IEM_MC_ADVANCE_RIP();
10966 IEM_MC_END();
10967 }
10968 return VINF_SUCCESS;
10969}
10970
10971
10972/** Opcode 0xf7 /0. TEST Ev,Iv: ANDs the operands to set EFLAGS; the destination is never written back. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
10974{
10975 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
10976 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10977
10978 if (IEM_IS_MODRM_REG_MODE(bRm))
10979 {
10980 /* register access */
10981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10982 switch (pVCpu->iem.s.enmEffOpSize)
10983 {
10984 case IEMMODE_16BIT:
10985 {
10986 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10987 IEM_MC_BEGIN(3, 0);
10988 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10989 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
10990 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10991 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10992 IEM_MC_REF_EFLAGS(pEFlags);
10993 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
10994 IEM_MC_ADVANCE_RIP();
10995 IEM_MC_END();
10996 return VINF_SUCCESS;
10997 }
10998
10999 case IEMMODE_32BIT:
11000 {
11001 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11002 IEM_MC_BEGIN(3, 0);
11003 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11004 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
11005 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11006 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11007 IEM_MC_REF_EFLAGS(pEFlags);
11008 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
11009 /* No clearing the high dword here - test doesn't write back the result. */
11010 IEM_MC_ADVANCE_RIP();
11011 IEM_MC_END();
11012 return VINF_SUCCESS;
11013 }
11014
11015 case IEMMODE_64BIT:
11016 {
11017 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* 64-bit form uses a sign-extended imm32. */
11018 IEM_MC_BEGIN(3, 0);
11019 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11020 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
11021 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11022 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11023 IEM_MC_REF_EFLAGS(pEFlags);
11024 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
11025 IEM_MC_ADVANCE_RIP();
11026 IEM_MC_END();
11027 return VINF_SUCCESS;
11028 }
11029
11030 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11031 }
11032 }
11033 else
11034 {
11035 /* memory access. */
11036 switch (pVCpu->iem.s.enmEffOpSize)
11037 {
11038 case IEMMODE_16BIT:
11039 {
11040 IEM_MC_BEGIN(3, 2);
11041 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11042 IEM_MC_ARG(uint16_t, u16Src, 1);
11043 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11045
11046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = size of the immediate following the ModRM bytes. */
11047 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
11048 IEM_MC_ASSIGN(u16Src, u16Imm);
11049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11050 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); /* Read-only mapping: TEST does not write. */
11051 IEM_MC_FETCH_EFLAGS(EFlags);
11052 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
11053
11054 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
11055 IEM_MC_COMMIT_EFLAGS(EFlags);
11056 IEM_MC_ADVANCE_RIP();
11057 IEM_MC_END();
11058 return VINF_SUCCESS;
11059 }
11060
11061 case IEMMODE_32BIT:
11062 {
11063 IEM_MC_BEGIN(3, 2);
11064 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11065 IEM_MC_ARG(uint32_t, u32Src, 1);
11066 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11068
11069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = size of the immediate following the ModRM bytes. */
11070 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11071 IEM_MC_ASSIGN(u32Src, u32Imm);
11072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11073 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11074 IEM_MC_FETCH_EFLAGS(EFlags);
11075 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
11076
11077 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
11078 IEM_MC_COMMIT_EFLAGS(EFlags);
11079 IEM_MC_ADVANCE_RIP();
11080 IEM_MC_END();
11081 return VINF_SUCCESS;
11082 }
11083
11084 case IEMMODE_64BIT:
11085 {
11086 IEM_MC_BEGIN(3, 2);
11087 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11088 IEM_MC_ARG(uint64_t, u64Src, 1);
11089 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11091
11092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* Still a 4 byte imm32 (sign-extended) in 64-bit mode. */
11093 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
11094 IEM_MC_ASSIGN(u64Src, u64Imm);
11095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11096 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11097 IEM_MC_FETCH_EFLAGS(EFlags);
11098 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
11099
11100 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
11101 IEM_MC_COMMIT_EFLAGS(EFlags);
11102 IEM_MC_ADVANCE_RIP();
11103 IEM_MC_END();
11104 return VINF_SUCCESS;
11105 }
11106
11107 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11108 }
11109 }
11110}
11111
11112
11113/** Opcode 0xf6 /4, /5, /6 and /7. Common MUL/IMUL/DIV/IDIV Eb worker: AX is the implicit
 * input/output; a non-zero return from the assembly worker raises \#DE. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
11115{
11116 if (IEM_IS_MODRM_REG_MODE(bRm))
11117 {
11118 /* register access */
11119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11120 IEM_MC_BEGIN(3, 1);
11121 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11122 IEM_MC_ARG(uint8_t, u8Value, 1);
11123 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11124 IEM_MC_LOCAL(int32_t, rc);
11125
11126 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11127 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11128 IEM_MC_REF_EFLAGS(pEFlags);
11129 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
11130 IEM_MC_IF_LOCAL_IS_Z(rc) { /* rc == 0: success. */
11131 IEM_MC_ADVANCE_RIP();
11132 } IEM_MC_ELSE() { /* Non-zero rc: division error. */
11133 IEM_MC_RAISE_DIVIDE_ERROR();
11134 } IEM_MC_ENDIF();
11135
11136 IEM_MC_END();
11137 }
11138 else
11139 {
11140 /* memory access. */
11141 IEM_MC_BEGIN(3, 2);
11142 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11143 IEM_MC_ARG(uint8_t, u8Value, 1);
11144 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11145 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11146 IEM_MC_LOCAL(int32_t, rc);
11147
11148 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11150 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11151 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11152 IEM_MC_REF_EFLAGS(pEFlags);
11153 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
11154 IEM_MC_IF_LOCAL_IS_Z(rc) { /* rc == 0: success. */
11155 IEM_MC_ADVANCE_RIP();
11156 } IEM_MC_ELSE() { /* Non-zero rc: division error. */
11157 IEM_MC_RAISE_DIVIDE_ERROR();
11158 } IEM_MC_ENDIF();
11159
11160 IEM_MC_END();
11161 }
11162 return VINF_SUCCESS;
11163}
11164
11165
11166/** Opcode 0xf7 /4, /5, /6 and /7. */
11167FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
11168{
11169 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11170
11171 if (IEM_IS_MODRM_REG_MODE(bRm))
11172 {
11173 /* register access */
11174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11175 switch (pVCpu->iem.s.enmEffOpSize)
11176 {
11177 case IEMMODE_16BIT:
11178 {
11179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11180 IEM_MC_BEGIN(4, 1);
11181 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11182 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11183 IEM_MC_ARG(uint16_t, u16Value, 2);
11184 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11185 IEM_MC_LOCAL(int32_t, rc);
11186
11187 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11188 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11189 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11190 IEM_MC_REF_EFLAGS(pEFlags);
11191 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11192 IEM_MC_IF_LOCAL_IS_Z(rc) {
11193 IEM_MC_ADVANCE_RIP();
11194 } IEM_MC_ELSE() {
11195 IEM_MC_RAISE_DIVIDE_ERROR();
11196 } IEM_MC_ENDIF();
11197
11198 IEM_MC_END();
11199 return VINF_SUCCESS;
11200 }
11201
11202 case IEMMODE_32BIT:
11203 {
11204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11205 IEM_MC_BEGIN(4, 1);
11206 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11207 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11208 IEM_MC_ARG(uint32_t, u32Value, 2);
11209 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11210 IEM_MC_LOCAL(int32_t, rc);
11211
11212 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11213 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11214 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11215 IEM_MC_REF_EFLAGS(pEFlags);
11216 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11217 IEM_MC_IF_LOCAL_IS_Z(rc) {
11218 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11219 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11220 IEM_MC_ADVANCE_RIP();
11221 } IEM_MC_ELSE() {
11222 IEM_MC_RAISE_DIVIDE_ERROR();
11223 } IEM_MC_ENDIF();
11224
11225 IEM_MC_END();
11226 return VINF_SUCCESS;
11227 }
11228
11229 case IEMMODE_64BIT:
11230 {
11231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11232 IEM_MC_BEGIN(4, 1);
11233 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11234 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11235 IEM_MC_ARG(uint64_t, u64Value, 2);
11236 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11237 IEM_MC_LOCAL(int32_t, rc);
11238
11239 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11240 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11241 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11242 IEM_MC_REF_EFLAGS(pEFlags);
11243 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11244 IEM_MC_IF_LOCAL_IS_Z(rc) {
11245 IEM_MC_ADVANCE_RIP();
11246 } IEM_MC_ELSE() {
11247 IEM_MC_RAISE_DIVIDE_ERROR();
11248 } IEM_MC_ENDIF();
11249
11250 IEM_MC_END();
11251 return VINF_SUCCESS;
11252 }
11253
11254 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11255 }
11256 }
11257 else
11258 {
11259 /* memory access. */
11260 switch (pVCpu->iem.s.enmEffOpSize)
11261 {
11262 case IEMMODE_16BIT:
11263 {
11264 IEM_MC_BEGIN(4, 2);
11265 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11266 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11267 IEM_MC_ARG(uint16_t, u16Value, 2);
11268 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11269 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11270 IEM_MC_LOCAL(int32_t, rc);
11271
11272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11274 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11275 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11276 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11277 IEM_MC_REF_EFLAGS(pEFlags);
11278 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11279 IEM_MC_IF_LOCAL_IS_Z(rc) {
11280 IEM_MC_ADVANCE_RIP();
11281 } IEM_MC_ELSE() {
11282 IEM_MC_RAISE_DIVIDE_ERROR();
11283 } IEM_MC_ENDIF();
11284
11285 IEM_MC_END();
11286 return VINF_SUCCESS;
11287 }
11288
11289 case IEMMODE_32BIT:
11290 {
11291 IEM_MC_BEGIN(4, 2);
11292 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11293 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11294 IEM_MC_ARG(uint32_t, u32Value, 2);
11295 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11297 IEM_MC_LOCAL(int32_t, rc);
11298
11299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11301 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11302 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11303 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11304 IEM_MC_REF_EFLAGS(pEFlags);
11305 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11306 IEM_MC_IF_LOCAL_IS_Z(rc) {
11307 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11308 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11309 IEM_MC_ADVANCE_RIP();
11310 } IEM_MC_ELSE() {
11311 IEM_MC_RAISE_DIVIDE_ERROR();
11312 } IEM_MC_ENDIF();
11313
11314 IEM_MC_END();
11315 return VINF_SUCCESS;
11316 }
11317
11318 case IEMMODE_64BIT:
11319 {
11320 IEM_MC_BEGIN(4, 2);
11321 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11322 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11323 IEM_MC_ARG(uint64_t, u64Value, 2);
11324 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11326 IEM_MC_LOCAL(int32_t, rc);
11327
11328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11330 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11331 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11332 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11333 IEM_MC_REF_EFLAGS(pEFlags);
11334 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11335 IEM_MC_IF_LOCAL_IS_Z(rc) {
11336 IEM_MC_ADVANCE_RIP();
11337 } IEM_MC_ELSE() {
11338 IEM_MC_RAISE_DIVIDE_ERROR();
11339 } IEM_MC_ENDIF();
11340
11341 IEM_MC_END();
11342 return VINF_SUCCESS;
11343 }
11344
11345 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11346 }
11347 }
11348}
11349
11350/**
11351 * @opcode 0xf6
11352 */
11353FNIEMOP_DEF(iemOp_Grp3_Eb)
11354{
11355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11356 switch (IEM_GET_MODRM_REG_8(bRm))
11357 {
11358 case 0:
11359 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
11360 case 1:
11361/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11362 return IEMOP_RAISE_INVALID_OPCODE();
11363 case 2:
11364 IEMOP_MNEMONIC(not_Eb, "not Eb");
11365 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
11366 case 3:
11367 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
11368 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
11369 case 4:
11370 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
11371 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11372 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
11373 case 5:
11374 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
11375 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11376 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
11377 case 6:
11378 IEMOP_MNEMONIC(div_Eb, "div Eb");
11379 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11380 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
11381 case 7:
11382 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
11383 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11384 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
11385 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11386 }
11387}
11388
11389
11390/**
11391 * @opcode 0xf7
11392 */
11393FNIEMOP_DEF(iemOp_Grp3_Ev)
11394{
11395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11396 switch (IEM_GET_MODRM_REG_8(bRm))
11397 {
11398 case 0:
11399 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
11400 case 1:
11401/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11402 return IEMOP_RAISE_INVALID_OPCODE();
11403 case 2:
11404 IEMOP_MNEMONIC(not_Ev, "not Ev");
11405 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
11406 case 3:
11407 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
11408 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
11409 case 4:
11410 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
11411 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11412 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
11413 case 5:
11414 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
11415 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11416 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
11417 case 6:
11418 IEMOP_MNEMONIC(div_Ev, "div Ev");
11419 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11420 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
11421 case 7:
11422 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
11423 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11424 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
11425 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11426 }
11427}
11428
11429
11430/**
11431 * @opcode 0xf8
11432 */
11433FNIEMOP_DEF(iemOp_clc)
11434{
11435 IEMOP_MNEMONIC(clc, "clc");
11436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11437 IEM_MC_BEGIN(0, 0);
11438 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
11439 IEM_MC_ADVANCE_RIP();
11440 IEM_MC_END();
11441 return VINF_SUCCESS;
11442}
11443
11444
11445/**
11446 * @opcode 0xf9
11447 */
11448FNIEMOP_DEF(iemOp_stc)
11449{
11450 IEMOP_MNEMONIC(stc, "stc");
11451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11452 IEM_MC_BEGIN(0, 0);
11453 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
11454 IEM_MC_ADVANCE_RIP();
11455 IEM_MC_END();
11456 return VINF_SUCCESS;
11457}
11458
11459
11460/**
11461 * @opcode 0xfa
11462 */
11463FNIEMOP_DEF(iemOp_cli)
11464{
11465 IEMOP_MNEMONIC(cli, "cli");
11466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11467 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
11468}
11469
11470
/**
 * @opcode 0xfb
 *
 * STI - not expressible as a simple microcode sequence, so deferred to the
 * C implementation.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11477
11478
11479/**
11480 * @opcode 0xfc
11481 */
11482FNIEMOP_DEF(iemOp_cld)
11483{
11484 IEMOP_MNEMONIC(cld, "cld");
11485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11486 IEM_MC_BEGIN(0, 0);
11487 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
11488 IEM_MC_ADVANCE_RIP();
11489 IEM_MC_END();
11490 return VINF_SUCCESS;
11491}
11492
11493
11494/**
11495 * @opcode 0xfd
11496 */
11497FNIEMOP_DEF(iemOp_std)
11498{
11499 IEMOP_MNEMONIC(std, "std");
11500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11501 IEM_MC_BEGIN(0, 0);
11502 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
11503 IEM_MC_ADVANCE_RIP();
11504 IEM_MC_END();
11505 return VINF_SUCCESS;
11506}
11507
11508
11509/**
11510 * @opcode 0xfe
11511 */
11512FNIEMOP_DEF(iemOp_Grp4)
11513{
11514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11515 switch (IEM_GET_MODRM_REG_8(bRm))
11516 {
11517 case 0:
11518 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
11519 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
11520 case 1:
11521 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
11522 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
11523 default:
11524 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
11525 return IEMOP_RAISE_INVALID_OPCODE();
11526 }
11527}
11528
11529
11530/**
11531 * Opcode 0xff /2.
11532 * @param bRm The RM byte.
11533 */
11534FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
11535{
11536 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
11537 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11538
11539 if (IEM_IS_MODRM_REG_MODE(bRm))
11540 {
11541 /* The new RIP is taken from a register. */
11542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11543 switch (pVCpu->iem.s.enmEffOpSize)
11544 {
11545 case IEMMODE_16BIT:
11546 IEM_MC_BEGIN(1, 0);
11547 IEM_MC_ARG(uint16_t, u16Target, 0);
11548 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
11549 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11550 IEM_MC_END()
11551 return VINF_SUCCESS;
11552
11553 case IEMMODE_32BIT:
11554 IEM_MC_BEGIN(1, 0);
11555 IEM_MC_ARG(uint32_t, u32Target, 0);
11556 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
11557 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11558 IEM_MC_END()
11559 return VINF_SUCCESS;
11560
11561 case IEMMODE_64BIT:
11562 IEM_MC_BEGIN(1, 0);
11563 IEM_MC_ARG(uint64_t, u64Target, 0);
11564 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
11565 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11566 IEM_MC_END()
11567 return VINF_SUCCESS;
11568
11569 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11570 }
11571 }
11572 else
11573 {
11574 /* The new RIP is taken from a register. */
11575 switch (pVCpu->iem.s.enmEffOpSize)
11576 {
11577 case IEMMODE_16BIT:
11578 IEM_MC_BEGIN(1, 1);
11579 IEM_MC_ARG(uint16_t, u16Target, 0);
11580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11581 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11583 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11584 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11585 IEM_MC_END()
11586 return VINF_SUCCESS;
11587
11588 case IEMMODE_32BIT:
11589 IEM_MC_BEGIN(1, 1);
11590 IEM_MC_ARG(uint32_t, u32Target, 0);
11591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11594 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11595 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11596 IEM_MC_END()
11597 return VINF_SUCCESS;
11598
11599 case IEMMODE_64BIT:
11600 IEM_MC_BEGIN(1, 1);
11601 IEM_MC_ARG(uint64_t, u64Target, 0);
11602 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11605 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11606 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11607 IEM_MC_END()
11608 return VINF_SUCCESS;
11609
11610 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11611 }
11612 }
11613}
11614
11615typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11616
11617FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11618{
11619 /* Registers? How?? */
11620 if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(bRm)))
11621 { /* likely */ }
11622 else
11623 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11624
11625 /* Far pointer loaded from memory. */
11626 switch (pVCpu->iem.s.enmEffOpSize)
11627 {
11628 case IEMMODE_16BIT:
11629 IEM_MC_BEGIN(3, 1);
11630 IEM_MC_ARG(uint16_t, u16Sel, 0);
11631 IEM_MC_ARG(uint16_t, offSeg, 1);
11632 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11633 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11634 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11636 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11637 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11638 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11639 IEM_MC_END();
11640 return VINF_SUCCESS;
11641
11642 case IEMMODE_64BIT:
11643 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
11644 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11645 * and call far qword [rsp] encodings. */
11646 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11647 {
11648 IEM_MC_BEGIN(3, 1);
11649 IEM_MC_ARG(uint16_t, u16Sel, 0);
11650 IEM_MC_ARG(uint64_t, offSeg, 1);
11651 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11652 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11655 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11656 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11657 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11658 IEM_MC_END();
11659 return VINF_SUCCESS;
11660 }
11661 /* AMD falls thru. */
11662 RT_FALL_THRU();
11663
11664 case IEMMODE_32BIT:
11665 IEM_MC_BEGIN(3, 1);
11666 IEM_MC_ARG(uint16_t, u16Sel, 0);
11667 IEM_MC_ARG(uint32_t, offSeg, 1);
11668 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11670 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11672 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11673 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11674 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11675 IEM_MC_END();
11676 return VINF_SUCCESS;
11677
11678 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11679 }
11680}
11681
11682
11683/**
11684 * Opcode 0xff /3.
11685 * @param bRm The RM byte.
11686 */
11687FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
11688{
11689 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
11690 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
11691}
11692
11693
11694/**
11695 * Opcode 0xff /4.
11696 * @param bRm The RM byte.
11697 */
11698FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
11699{
11700 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
11701 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11702
11703 if (IEM_IS_MODRM_REG_MODE(bRm))
11704 {
11705 /* The new RIP is taken from a register. */
11706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11707 switch (pVCpu->iem.s.enmEffOpSize)
11708 {
11709 case IEMMODE_16BIT:
11710 IEM_MC_BEGIN(0, 1);
11711 IEM_MC_LOCAL(uint16_t, u16Target);
11712 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
11713 IEM_MC_SET_RIP_U16(u16Target);
11714 IEM_MC_END()
11715 return VINF_SUCCESS;
11716
11717 case IEMMODE_32BIT:
11718 IEM_MC_BEGIN(0, 1);
11719 IEM_MC_LOCAL(uint32_t, u32Target);
11720 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
11721 IEM_MC_SET_RIP_U32(u32Target);
11722 IEM_MC_END()
11723 return VINF_SUCCESS;
11724
11725 case IEMMODE_64BIT:
11726 IEM_MC_BEGIN(0, 1);
11727 IEM_MC_LOCAL(uint64_t, u64Target);
11728 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
11729 IEM_MC_SET_RIP_U64(u64Target);
11730 IEM_MC_END()
11731 return VINF_SUCCESS;
11732
11733 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11734 }
11735 }
11736 else
11737 {
11738 /* The new RIP is taken from a memory location. */
11739 switch (pVCpu->iem.s.enmEffOpSize)
11740 {
11741 case IEMMODE_16BIT:
11742 IEM_MC_BEGIN(0, 2);
11743 IEM_MC_LOCAL(uint16_t, u16Target);
11744 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11747 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11748 IEM_MC_SET_RIP_U16(u16Target);
11749 IEM_MC_END()
11750 return VINF_SUCCESS;
11751
11752 case IEMMODE_32BIT:
11753 IEM_MC_BEGIN(0, 2);
11754 IEM_MC_LOCAL(uint32_t, u32Target);
11755 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11758 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11759 IEM_MC_SET_RIP_U32(u32Target);
11760 IEM_MC_END()
11761 return VINF_SUCCESS;
11762
11763 case IEMMODE_64BIT:
11764 IEM_MC_BEGIN(0, 2);
11765 IEM_MC_LOCAL(uint64_t, u64Target);
11766 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11769 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11770 IEM_MC_SET_RIP_U64(u64Target);
11771 IEM_MC_END()
11772 return VINF_SUCCESS;
11773
11774 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11775 }
11776 }
11777}
11778
11779
11780/**
11781 * Opcode 0xff /5.
11782 * @param bRm The RM byte.
11783 */
11784FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
11785{
11786 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
11787 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
11788}
11789
11790
11791/**
11792 * Opcode 0xff /6.
11793 * @param bRm The RM byte.
11794 */
11795FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
11796{
11797 IEMOP_MNEMONIC(push_Ev, "push Ev");
11798
11799 /* Registers are handled by a common worker. */
11800 if (IEM_IS_MODRM_REG_MODE(bRm))
11801 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
11802
11803 /* Memory we do here. */
11804 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11805 switch (pVCpu->iem.s.enmEffOpSize)
11806 {
11807 case IEMMODE_16BIT:
11808 IEM_MC_BEGIN(0, 2);
11809 IEM_MC_LOCAL(uint16_t, u16Src);
11810 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11811 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11813 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11814 IEM_MC_PUSH_U16(u16Src);
11815 IEM_MC_ADVANCE_RIP();
11816 IEM_MC_END();
11817 return VINF_SUCCESS;
11818
11819 case IEMMODE_32BIT:
11820 IEM_MC_BEGIN(0, 2);
11821 IEM_MC_LOCAL(uint32_t, u32Src);
11822 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11825 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11826 IEM_MC_PUSH_U32(u32Src);
11827 IEM_MC_ADVANCE_RIP();
11828 IEM_MC_END();
11829 return VINF_SUCCESS;
11830
11831 case IEMMODE_64BIT:
11832 IEM_MC_BEGIN(0, 2);
11833 IEM_MC_LOCAL(uint64_t, u64Src);
11834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11835 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11837 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11838 IEM_MC_PUSH_U64(u64Src);
11839 IEM_MC_ADVANCE_RIP();
11840 IEM_MC_END();
11841 return VINF_SUCCESS;
11842
11843 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11844 }
11845}
11846
11847
11848/**
11849 * @opcode 0xff
11850 */
11851FNIEMOP_DEF(iemOp_Grp5)
11852{
11853 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11854 switch (IEM_GET_MODRM_REG_8(bRm))
11855 {
11856 case 0:
11857 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
11858 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
11859 case 1:
11860 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
11861 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
11862 case 2:
11863 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
11864 case 3:
11865 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
11866 case 4:
11867 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
11868 case 5:
11869 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
11870 case 6:
11871 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
11872 case 7:
11873 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
11874 return IEMOP_RAISE_INVALID_OPCODE();
11875 }
11876 AssertFailedReturn(VERR_IEM_IPE_3);
11877}
11878
11879
11880
/**
 * The decoder function table for the one byte opcodes (0x00..0xff).
 *
 * Indexed directly by the opcode byte; the order of entries is therefore
 * significant and must not be changed.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
11948
11949
11950/** @} */
11951
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette