VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 87778

Last change on this file since 87778 was 84476, checked in by vboxsync, 5 years ago

IEM: Reworked LOOP instruction so that it only shortcuts when logging is on to avoid WfW 3.11/Win95 crashes (see bugref:9735).

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 393.5 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 84476 2020-05-24 18:17:04Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2020 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24/* Instruction group definitions: */
25
26/** @defgroup og_gen General
27 * @{ */
28 /** @defgroup og_gen_arith Arithmetic
29 * @{ */
30 /** @defgroup og_gen_arith_bin Binary numbers */
31 /** @defgroup og_gen_arith_dec Decimal numbers */
32 /** @} */
33/** @} */
34
35/** @defgroup og_stack Stack
36 * @{ */
37 /** @defgroup og_stack_sreg Segment registers */
38/** @} */
39
40/** @defgroup og_prefix Prefixes */
41/** @defgroup og_escapes Escape bytes */
42
43
44
45/** @name One byte opcodes.
46 * @{
47 */
48
49/* Instruction specification format - work in progress: */
50
51/**
52 * @opcode 0x00
53 * @opmnemonic add
54 * @op1 rm:Eb
55 * @op2 reg:Gb
56 * @opmaps one
57 * @openc ModR/M
58 * @opflmodify cf,pf,af,zf,sf,of
59 * @ophints harmless ignores_op_sizes
60 * @opstats add_Eb_Gb
61 * @opgroup og_gen_arith_bin
62 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
63 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
64 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
65 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
66 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD r/m8, r8 - memory/register destination form; LOCK prefix permitted. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
72
73
74/**
75 * @opcode 0x01
76 * @opgroup og_gen_arith_bin
77 * @opflmodify cf,pf,af,zf,sf,of
78 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
79 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
80 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
81 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
82 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD r/m16/32/64, r16/32/64 - dispatch to the operand-size aware helper. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
88
89
90/**
91 * @opcode 0x02
92 * @opgroup og_gen_arith_bin
93 * @opflmodify cf,pf,af,zf,sf,of
94 * @opcopytests iemOp_add_Eb_Gb
95 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD r8, r/m8 - register destination form; no LOCK prefix. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
101
102
103/**
104 * @opcode 0x03
105 * @opgroup og_gen_arith_bin
106 * @opflmodify cf,pf,af,zf,sf,of
107 * @opcopytests iemOp_add_Ev_Gv
108 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD r16/32/64, r/m16/32/64 - register destination form. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
114
115
116/**
117 * @opcode 0x04
118 * @opgroup og_gen_arith_bin
119 * @opflmodify cf,pf,af,zf,sf,of
120 * @opcopytests iemOp_add_Eb_Gb
121 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL, imm8 - fixed AL destination, byte immediate. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
127
128
129/**
130 * @opcode 0x05
131 * @opgroup og_gen_arith_bin
132 * @opflmodify cf,pf,af,zf,sf,of
133 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
134 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
135 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
136 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
137 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX, imm16/32 - fixed accumulator destination, operand-size sensitive. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
143
144
145/**
146 * @opcode 0x06
147 * @opgroup og_stack_sreg
148 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES - invalid in 64-bit mode (raises #UD via the NO_64BIT helper). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
155
156
157/**
158 * @opcode 0x07
159 * @opgroup og_stack_sreg
160 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES - invalid in 64-bit mode; deferred to the C implementation. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
168
169
170/**
171 * @opcode 0x08
172 * @opgroup og_gen_arith_bin
173 * @opflmodify cf,pf,af,zf,sf,of
174 * @opflundef af
175 * @opflclear of,cf
176 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
177 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
178 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
179 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
180 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR r/m8, r8 - AF is left undefined by OR, hence the verification mask. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
187
188
/**
190 * @opcode 0x09
191 * @opgroup og_gen_arith_bin
192 * @opflmodify cf,pf,af,zf,sf,of
193 * @opflundef af
194 * @opflclear of,cf
195 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
196 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
197 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
198 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
199 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
200 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
201 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR r/m16/32/64, r16/32/64 - AF is left undefined by OR. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}
208
209
210/**
211 * @opcode 0x0a
212 * @opgroup og_gen_arith_bin
213 * @opflmodify cf,pf,af,zf,sf,of
214 * @opflundef af
215 * @opflclear of,cf
216 * @opcopytests iemOp_or_Eb_Gb
217 */
218FNIEMOP_DEF(iemOp_or_Gb_Eb)
219{
220 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
221 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
222 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
223}
224
225
226/**
227 * @opcode 0x0b
228 * @opgroup og_gen_arith_bin
229 * @opflmodify cf,pf,af,zf,sf,of
230 * @opflundef af
231 * @opflclear of,cf
232 * @opcopytests iemOp_or_Ev_Gv
233 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR r16/32/64, r/m16/32/64 - register destination form. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
240
241
242/**
243 * @opcode 0x0c
244 * @opgroup og_gen_arith_bin
245 * @opflmodify cf,pf,af,zf,sf,of
246 * @opflundef af
247 * @opflclear of,cf
248 * @opcopytests iemOp_or_Eb_Gb
249 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL, imm8 - fixed AL destination. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
256
257
258/**
259 * @opcode 0x0d
260 * @opgroup og_gen_arith_bin
261 * @opflmodify cf,pf,af,zf,sf,of
262 * @opflundef af
263 * @opflclear of,cf
264 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
265 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
266 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
267 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
268 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
269 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
270 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
271 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX, imm16/32 - fixed accumulator destination. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
278
279
280/**
281 * @opcode 0x0e
282 * @opgroup og_stack_sreg
283 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
290
291
292/**
293 * @opcode 0x0f
294 * @opmnemonic EscTwo0f
295 * @openc two0f
296 * @opdisenum OP_2B_ESC
297 * @ophints harmless
298 * @opgroup og_escapes
299 */
300FNIEMOP_DEF(iemOp_2byteEscape)
301{
302#ifdef VBOX_STRICT
303 /* Sanity check the table the first time around. */
304 static bool s_fTested = false;
305 if (RT_LIKELY(s_fTested)) { /* likely */ }
306 else
307 {
308 s_fTested = true;
309 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
310 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
311 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
312 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
313 }
314#endif
315
316 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
317 {
318 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
319 IEMOP_HLP_MIN_286();
320 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
321 }
322 /* @opdone */
323
324 /*
325 * On the 8086 this is a POP CS instruction.
326 * For the time being we don't specify this this.
327 */
328 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
329 IEMOP_HLP_NO_64BIT();
330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
331 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
332}
333
334/**
335 * @opcode 0x10
336 * @opgroup og_gen_arith_bin
337 * @opfltest cf
338 * @opflmodify cf,pf,af,zf,sf,of
339 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
340 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
341 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
342 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
343 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
344 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC r/m8, r8 - add with carry; CF is an input (see @opfltest). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
350
351
352/**
353 * @opcode 0x11
354 * @opgroup og_gen_arith_bin
355 * @opfltest cf
356 * @opflmodify cf,pf,af,zf,sf,of
357 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
358 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
359 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
360 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
361 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
362 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC r/m16/32/64, r16/32/64 - add with carry. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
368
369
370/**
371 * @opcode 0x12
372 * @opgroup og_gen_arith_bin
373 * @opfltest cf
374 * @opflmodify cf,pf,af,zf,sf,of
375 * @opcopytests iemOp_adc_Eb_Gb
376 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC r8, r/m8 - register destination form. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
382
383
384/**
385 * @opcode 0x13
386 * @opgroup og_gen_arith_bin
387 * @opfltest cf
388 * @opflmodify cf,pf,af,zf,sf,of
389 * @opcopytests iemOp_adc_Ev_Gv
390 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC r16/32/64, r/m16/32/64 - register destination form. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
396
397
398/**
399 * @opcode 0x14
400 * @opgroup og_gen_arith_bin
401 * @opfltest cf
402 * @opflmodify cf,pf,af,zf,sf,of
403 * @opcopytests iemOp_adc_Eb_Gb
404 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
410
411
412/**
413 * @opcode 0x15
414 * @opgroup og_gen_arith_bin
415 * @opfltest cf
416 * @opflmodify cf,pf,af,zf,sf,of
417 * @opcopytests iemOp_adc_Ev_Gv
418 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX, imm16/32. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
424
425
426/**
427 * @opcode 0x16
428 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
435
436
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS - inhibits interrupts for one instruction; invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
450
451
452/**
453 * @opcode 0x18
454 * @opgroup og_gen_arith_bin
455 * @opfltest cf
456 * @opflmodify cf,pf,af,zf,sf,of
457 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB r/m8, r8 - subtract with borrow; CF is an input. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
463
464
465/**
466 * @opcode 0x19
467 * @opgroup og_gen_arith_bin
468 * @opfltest cf
469 * @opflmodify cf,pf,af,zf,sf,of
470 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
476
477
478/**
479 * @opcode 0x1a
480 * @opgroup og_gen_arith_bin
481 * @opfltest cf
482 * @opflmodify cf,pf,af,zf,sf,of
483 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB r8, r/m8 - register destination form. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
489
490
491/**
492 * @opcode 0x1b
493 * @opgroup og_gen_arith_bin
494 * @opfltest cf
495 * @opflmodify cf,pf,af,zf,sf,of
496 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB r16/32/64, r/m16/32/64 - register destination form. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
502
503
504/**
505 * @opcode 0x1c
506 * @opgroup og_gen_arith_bin
507 * @opfltest cf
508 * @opflmodify cf,pf,af,zf,sf,of
509 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
515
516
517/**
518 * @opcode 0x1d
519 * @opgroup og_gen_arith_bin
520 * @opfltest cf
521 * @opflmodify cf,pf,af,zf,sf,of
522 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX, imm16/32. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
528
529
530/**
531 * @opcode 0x1e
532 * @opgroup og_stack_sreg
533 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
540
541
542/**
543 * @opcode 0x1f
544 * @opgroup og_stack_sreg
545 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - invalid in 64-bit mode; deferred to the C implementation. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
553
554
555/**
556 * @opcode 0x20
557 * @opgroup og_gen_arith_bin
558 * @opflmodify cf,pf,af,zf,sf,of
559 * @opflundef af
560 * @opflclear of,cf
561 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8, r8 - AF is left undefined by AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
568
569
570/**
571 * @opcode 0x21
572 * @opgroup og_gen_arith_bin
573 * @opflmodify cf,pf,af,zf,sf,of
574 * @opflundef af
575 * @opflclear of,cf
576 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
583
584
585/**
586 * @opcode 0x22
587 * @opgroup og_gen_arith_bin
588 * @opflmodify cf,pf,af,zf,sf,of
589 * @opflundef af
590 * @opflclear of,cf
591 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8 - register destination form. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
598
599
600/**
601 * @opcode 0x23
602 * @opgroup og_gen_arith_bin
603 * @opflmodify cf,pf,af,zf,sf,of
604 * @opflundef af
605 * @opflclear of,cf
606 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND r16/32/64, r/m16/32/64 - register destination form. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
613
614
615/**
616 * @opcode 0x24
617 * @opgroup og_gen_arith_bin
618 * @opflmodify cf,pf,af,zf,sf,of
619 * @opflundef af
620 * @opflclear of,cf
621 */
622FNIEMOP_DEF(iemOp_and_Al_Ib)
623{
624 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
625 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
626 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
627}
628
629
630/**
631 * @opcode 0x25
632 * @opgroup og_gen_arith_bin
633 * @opflmodify cf,pf,af,zf,sf,of
634 * @opflundef af
635 * @opflclear of,cf
636 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, imm16/32. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
643
644
645/**
646 * @opcode 0x26
647 * @opmnemonic SEG
648 * @op1 ES
649 * @opgroup og_prefix
650 * @openc prefix
651 * @opdisenum OP_SEG
652 * @ophints harmless
653 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it, then decode and dispatch the
       next opcode byte through the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
663
664
665/**
666 * @opcode 0x27
667 * @opfltest af,cf
668 * @opflmodify cf,pf,af,zf,sf,of
669 * @opflundef of
670 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; invalid in 64-bit mode, OF undefined. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
679
680
681/**
682 * @opcode 0x28
683 * @opgroup og_gen_arith_bin
684 * @opflmodify cf,pf,af,zf,sf,of
685 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
691
692
693/**
694 * @opcode 0x29
695 * @opgroup og_gen_arith_bin
696 * @opflmodify cf,pf,af,zf,sf,of
697 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
703
704
705/**
706 * @opcode 0x2a
707 * @opgroup og_gen_arith_bin
708 * @opflmodify cf,pf,af,zf,sf,of
709 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8 - register destination form. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
715
716
717/**
718 * @opcode 0x2b
719 * @opgroup og_gen_arith_bin
720 * @opflmodify cf,pf,af,zf,sf,of
721 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB r16/32/64, r/m16/32/64 - register destination form. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
727
728
729/**
730 * @opcode 0x2c
731 * @opgroup og_gen_arith_bin
732 * @opflmodify cf,pf,af,zf,sf,of
733 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
739
740
741/**
742 * @opcode 0x2d
743 * @opgroup og_gen_arith_bin
744 * @opflmodify cf,pf,af,zf,sf,of
745 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, imm16/32. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
751
752
753/**
754 * @opcode 0x2e
755 * @opmnemonic SEG
756 * @op1 CS
757 * @opgroup og_prefix
758 * @openc prefix
759 * @opdisenum OP_SEG
760 * @ophints harmless
761 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it, then decode and dispatch the
       next opcode byte through the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
771
772
773/**
774 * @opcode 0x2f
775 * @opfltest af,cf
776 * @opflmodify cf,pf,af,zf,sf,of
777 * @opflundef of
778 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; invalid in 64-bit mode, OF undefined. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
787
788
789/**
790 * @opcode 0x30
791 * @opgroup og_gen_arith_bin
792 * @opflmodify cf,pf,af,zf,sf,of
793 * @opflundef af
794 * @opflclear of,cf
795 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8 - AF is left undefined by XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
802
803
804/**
805 * @opcode 0x31
806 * @opgroup og_gen_arith_bin
807 * @opflmodify cf,pf,af,zf,sf,of
808 * @opflundef af
809 * @opflclear of,cf
810 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
817
818
819/**
820 * @opcode 0x32
821 * @opgroup og_gen_arith_bin
822 * @opflmodify cf,pf,af,zf,sf,of
823 * @opflundef af
824 * @opflclear of,cf
825 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8 - register destination form. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
832
833
834/**
835 * @opcode 0x33
836 * @opgroup og_gen_arith_bin
837 * @opflmodify cf,pf,af,zf,sf,of
838 * @opflundef af
839 * @opflclear of,cf
840 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR r16/32/64, r/m16/32/64 - register destination form. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
847
848
849/**
850 * @opcode 0x34
851 * @opgroup og_gen_arith_bin
852 * @opflmodify cf,pf,af,zf,sf,of
853 * @opflundef af
854 * @opflclear of,cf
855 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
862
863
864/**
865 * @opcode 0x35
866 * @opgroup og_gen_arith_bin
867 * @opflmodify cf,pf,af,zf,sf,of
868 * @opflundef af
869 * @opflclear of,cf
870 */
871FNIEMOP_DEF(iemOp_xor_eAX_Iz)
872{
873 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
874 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
875 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
876}
877
878
879/**
880 * @opcode 0x36
881 * @opmnemonic SEG
882 * @op1 SS
883 * @opgroup og_prefix
884 * @openc prefix
885 * @opdisenum OP_SEG
886 * @ophints harmless
887 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it, then decode and dispatch the
       next opcode byte through the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
897
898
899/**
900 * @opcode 0x37
901 * @opfltest af,cf
902 * @opflmodify cf,pf,af,zf,sf,of
903 * @opflundef pf,zf,sf,of
904 * @opgroup og_gen_arith_dec
905 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
906 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
907 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
908 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
909 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
910 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
911 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
912 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
913 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
914 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
915 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
916 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
917 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
918 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
919 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
920 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
921 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
922 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
923 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
924 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
925 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
926 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
927 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
928 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
929 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
930 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
931 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
932 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
933 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
934 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
935 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
936 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA - ASCII adjust AL after addition; invalid in 64-bit mode, OF undefined. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
}
946
947
948/**
949 * @opcode 0x38
950 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8 - subtract without storing the result (flags only). */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
956
957
958/**
959 * @opcode 0x39
960 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m16/32/64, r16/32/64 - flags only. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
966
967
968/**
969 * @opcode 0x3a
970 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8 - flags only. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
976
977
978/**
979 * @opcode 0x3b
980 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r16/32/64, r/m16/32/64 - flags only. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
986
987
988/**
989 * @opcode 0x3c
990 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8 - flags only. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
996
997
998/**
999 * @opcode 0x3d
1000 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, imm16/32 - flags only. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
1006
1007
1008/**
1009 * @opcode 0x3e
1010 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it, then decode and dispatch the
       next opcode byte through the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1020
1021
1022/**
1023 * @opcode 0x3f
1024 * @opfltest af,cf
1025 * @opflmodify cf,pf,af,zf,sf,of
1026 * @opflundef pf,zf,sf,of
1027 * @opgroup og_gen_arith_dec
1028 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1029 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1030 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1031 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1032 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1033 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1034 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1035 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1036 * @optest8 amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1037 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1038 * @optest10 amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1039 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1040 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1041 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1042 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1043 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1044 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1045 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1046 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1047 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1048 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1049 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1050 * @optest22 amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1051 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1052 * @optest24 amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1053 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1054 * @optest26 amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1055 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1056 * @optest28 amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1057 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1058 * @optest30 amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1059 * @optest31 intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1060 * @optest32 amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1061 * @optest33 intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1062 * @optest34 amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1063 */
1064FNIEMOP_DEF(iemOp_aas)
1065{
1066 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
1067 IEMOP_HLP_NO_64BIT();
1068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1069 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1070
1071 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
1072}
1073
1074
1075/**
1076 * Common 'inc/dec/not/neg register' helper.
1077 */
/* Dispatches on the effective operand size and calls the matching normal
   (unlocked) worker from pImpl on general register iReg, updating EFLAGS
   through the worker.  pImpl: unary operation table; iReg: GPR index. */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit GPR writes zero the upper half in 64-bit mode */
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    return VINF_SUCCESS; /* not reached: all IEMMODE values handled above */
}
1119
1120
1121/**
1122 * @opcode 0x40
1123 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX; /* plain REX: no W/R/X/B bits set */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
1141
1142
1143/**
1144 * @opcode 0x41
1145 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3; /* REX.B extends the r/m / opcode-reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
1164
1165
1166/**
1167 * @opcode 0x42
1168 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X extends the SIB index field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
1187
1188
1189
1190/**
1191 * @opcode 0x43
1192 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;     /* REX.B */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
1212
1213
1214/**
1215 * @opcode 0x44
1216 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R extends the ModRM reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
1235
1236
1237/**
1238 * @opcode 0x45
1239 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R */
        pVCpu->iem.s.uRexB = 1 << 3;   /* REX.B */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
1259
1260
1261/**
1262 * @opcode 0x46
1263 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;   /* REX.R */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
1283
1284
1285/**
1286 * @opcode 0x47
1287 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;   /* REX.R */
        pVCpu->iem.s.uRexB = 1 << 3;     /* REX.B */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
1308
1309
1310/**
1311 * @opcode 0x48
1312 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
1331
1332
1333/**
1334 * @opcode 0x49
1335 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3; /* REX.B */
        iemRecalEffOpSize(pVCpu);    /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
1355
1356
1357/**
1358 * @opcode 0x4a
1359 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
1379
1380
1381/**
1382 * @opcode 0x4b
1383 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;     /* REX.B */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
1404
1405
1406/**
1407 * @opcode 0x4c
1408 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R */
        iemRecalEffOpSize(pVCpu);      /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
1428
1429
1430/**
1431 * @opcode 0x4d
1432 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R */
        pVCpu->iem.s.uRexB = 1 << 3;   /* REX.B */
        iemRecalEffOpSize(pVCpu);      /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
1453
1454
1455/**
1456 * @opcode 0x4e
1457 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;   /* REX.R */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
1478
1479
1480/**
1481 * @opcode 0x4f
1482 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;   /* REX.R */
        pVCpu->iem.s.uRexB = 1 << 3;     /* REX.B */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
1504
1505
1506/**
1507 * Common 'push register' helper.
1508 */
/* Pushes general register iReg using the effective operand size.  In 64-bit
   mode REX.B extends the register index, the default operand size becomes
   64-bit, and a 66h prefix selects 16-bit (32-bit is not encodable). */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1551
1552
1553/**
1554 * @opcode 0x50
1555 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* Defer to the common 'push register' worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
1561
1562
1563/**
1564 * @opcode 0x51
1565 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* Defer to the common 'push register' worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
1571
1572
1573/**
1574 * @opcode 0x52
1575 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* Defer to the common 'push register' worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
1581
1582
1583/**
1584 * @opcode 0x53
1585 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* Defer to the common 'push register' worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1591
1592
1593/**
1594 * @opcode 0x54
1595 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        /* 8086 quirk: PUSH SP stores the already-decremented SP value
           (SP - 2) rather than the pre-push value. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* All later CPUs push the pre-push SP/ESP/RSP value. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
1611
1612
1613/**
1614 * @opcode 0x55
1615 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* Defer to the common 'push register' worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
1621
1622
1623/**
1624 * @opcode 0x56
1625 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* Defer to the common 'push register' worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
1631
1632
1633/**
1634 * @opcode 0x57
1635 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* Defer to the common 'push register' worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1641
1642
1643/**
1644 * Common 'pop register' helper.
1645 */
/* Pops into general register iReg using the effective operand size.  In
   64-bit mode REX.B extends the register index, the default operand size
   becomes 64-bit, and a 66h prefix selects 16-bit. */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1689
1690
1691/**
1692 * @opcode 0x58
1693 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* Defer to the common 'pop register' worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
1699
1700
1701/**
1702 * @opcode 0x59
1703 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* Defer to the common 'pop register' worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
1709
1710
1711/**
1712 * @opcode 0x5a
1713 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* Defer to the common 'pop register' worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
1719
1720
1721/**
1722 * @opcode 0x5b
1723 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* Defer to the common 'pop register' worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
1729
1730
1731/**
1732 * @opcode 0x5c
1733 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this is 'pop r12' and the common worker applies; the
           special SP handling below is only for the plain rSP encoding. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    /* Pop into a local first, then store to xSP, since the pop itself
       modifies the stack pointer. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1780
1781
1782/**
1783 * @opcode 0x5d
1784 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* Defer to the common 'pop register' worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1790
1791
1792/**
1793 * @opcode 0x5e
1794 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* Defer to the common 'pop register' worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1800
1801
1802/**
1803 * @opcode 0x5f
1804 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* Defer to the common 'pop register' worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1810
1811
1812/**
1813 * @opcode 0x60
1814 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();  /* introduced with the 80186 */
    IEMOP_HLP_NO_64BIT(); /* invalid in 64-bit mode */
    /* Defer to the C implementation matching the effective operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1825
1826
1827/**
1828 * @opcode 0x61
1829 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* Outside 64-bit mode this is POPA; in 64-bit mode the byte is the
       (unsupported) MVEX prefix. */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1846
1847
1848/**
1849 * @opcode 0x62
1850 * @opmnemonic bound
1851 * @op1 Gv_RO
1852 * @op2 Ma
1853 * @opmincpu 80186
1854 * @ophints harmless invalid_64
1855 * @optest op1=0 op2=0 ->
1856 * @optest op1=1 op2=0 -> value.xcpt=5
1857 * @optest o16 / op1=0xffff op2=0x0000fffe ->
1858 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
1859 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
1860 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
1861 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
1862 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
1863 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
1864 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
1865 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
1866 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
1867 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
1868 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
1869 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
1870 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
1871 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
1872 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
1873 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
1874 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
1875 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
1876 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
1877 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
1878 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
1879 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
1880 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
1881 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
1882 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
1883 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
1884 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
1885 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
1886 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
1887 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
1888 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
1889 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
1890 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
1891 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
1892 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
1893 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
1894 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
1895 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
1896 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
1897 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
1898 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
1899 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            /* MOD != 3: this really is BOUND. */
            /** @todo testcase: check that there are two memory accesses involved. Check
             * whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at [mem], upper bound at [mem+2]. */
                IEM_MC_FETCH_GREG_U16(u16Index, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at [mem], upper bound at [mem+4]. */
                IEM_MC_FETCH_GREG_U32(u32Index, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         * does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX prefix path: consume the remaining two payload bytes (P2, P3). */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1987
1988
1989/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();              /* ARPL was introduced with the 80286 */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();  /* protected mode only */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory: destination selector is mapped read-write since ARPL may
           raise its RPL field. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
2038
2039
2040/**
2041 * @opcode 0x63
2042 *
2043 * @note This is a weird one. It works like a regular move instruction if
2044 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2045 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        /* Sign-extend the 32-bit source into the 64-bit destination. */
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2083
2084
2085/**
2086 * @opcode 0x64
2087 * @opmnemonic segfs
2088 * @opmincpu 80386
2089 * @opgroup og_prefixes
2090 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    /* FS segment-override prefix: record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS; /* effective segment for subsequent memory operands */

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2102
2103
2104/**
2105 * @opcode 0x65
2106 * @opmnemonic seggs
2107 * @opmincpu 80386
2108 * @opgroup og_prefixes
2109 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    /* GS segment-override prefix: record it and continue decoding with the
       next opcode byte. A REX prefix seen before this byte is discarded, per
       the x86 rule that REX must immediately precede the opcode. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    /* Tail-dispatch to the instruction following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2121
2122
2123/**
2124 * @opcode 0x66
2125 * @opmnemonic opsize
2126 * @openc prefix
2127 * @opmincpu 80386
2128 * @ophints harmless
2129 * @opgroup og_prefixes
2130 */
FNIEMOP_DEF(iemOp_op_size)
{
    /* Operand-size (66h) prefix: flag it, recalculate the effective operand
       size, then continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present (idxPrefix stays at the REPZ/REPNZ
       value once one of those has been seen). */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    /* Tail-dispatch to the instruction following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2147
2148
2149/**
2150 * @opcode 0x67
2151 * @opmnemonic addrsize
2152 * @openc prefix
2153 * @opmincpu 80386
2154 * @ophints harmless
2155 * @opgroup og_prefixes
2156 */
FNIEMOP_DEF(iemOp_addr_size)
{
    /* Address-size (67h) prefix: flag it, toggle the effective address mode
       relative to the default mode, then continue decoding. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break; /* 64-bit mode: 67h selects 32-bit addressing. */
        default: AssertFailed();
    }

    /* Tail-dispatch to the instruction following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2174
2175
2176/**
2177 * @opcode 0x68
2178 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    /* PUSH imm16/imm32 (sign-extended imm32 in 64-bit mode). The immediate
       width follows the effective operand size; in long mode the default
       operand size is forced to 64-bit. */
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* No imm64 form exists; a 32-bit immediate is sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2222
2223
2224/**
2225 * @opcode 0x69
2226 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    /* Three-operand IMUL with full-width immediate: Gv = Ev * Iz.
       The multiply itself is done by the iemAImpl_imul_two_uNN assembly
       helper on a local copy of the source, and the truncated result is then
       written back to the Gv register. */
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF/ZF/AF/PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The ModR/M displacement precedes the immediate, so the effective
                   address is decoded first; the 3rd arg tells the decoder that 2
                   immediate bytes still follow (matters for RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 immediate bytes follow the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                /* 64-bit form uses a sign-extended 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 immediate bytes (sign-extended imm32) follow the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
2385
2386
2387/**
2388 * @opcode 0x6a
2389 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    /* PUSH imm8: the byte immediate is sign-extended to the effective operand
       size (i8Imm is signed, so the implicit conversion in the push macros
       performs the extension). */
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2415
2416
2417/**
2418 * @opcode 0x6b
2419 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    /* Three-operand IMUL with byte immediate: Gv = Ev * Ib, the imm8 being
       sign-extended to the effective operand size.  Same structure as the
       Iz variant above, differing only in immediate decoding. */
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF/ZF/AF/PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first; 1 immediate byte still follows. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first; 1 immediate byte still follows. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first; 1 immediate byte still follows. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
2572
2573
2574/**
2575 * @opcode 0x6c
2576 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    /* INSB / REP INSB: byte string input from port DX to ES:[e/rDI].
       Deferred to a C implementation selected by addressing mode; the
       'false' argument appears to select a non-IO-checking variant —
       NOTE(review): confirm its meaning against the iemCImpl_*ins*
       definitions. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        /* Both F2 and F3 prefixes behave as REP for string I/O. */
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2604
2605
2606/**
2607 * @opcode 0x6d
2608 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    /* INSW/INSD / REP INSW/INSD: word/dword string input from port DX.
       Dispatches on effective operand size and addressing mode to a C
       implementation.  A 64-bit operand size maps to the 32-bit op
       handlers (no 64-bit INS exists). */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - inner switch always returns */
            case IEMMODE_64BIT: /* treated as 32-bit operand size */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - inner switch always returns */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - inner switch always returns */
            case IEMMODE_64BIT: /* treated as 32-bit operand size */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - inner switch always returns */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2668
2669
2670/**
2671 * @opcode 0x6e
2672 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    /* OUTSB / REP OUTSB: byte string output from iEffSeg:[e/rSI] (the source
       segment honors segment-override prefixes, hence the extra iEffSeg
       argument vs. INS) to port DX. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        /* Both F2 and F3 prefixes behave as REP for string I/O. */
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2700
2701
2702/**
2703 * @opcode 0x6f
2704 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    /* OUTSW/OUTSD / REP OUTSW/OUTSD: word/dword string output to port DX.
       Dispatches on effective operand size and addressing mode; 64-bit
       operand size maps to the 32-bit op handlers (no 64-bit OUTS exists).
       The source segment honors overrides, hence the iEffSeg argument. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - inner switch always returns */
            case IEMMODE_64BIT: /* treated as 32-bit operand size */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - inner switch always returns */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - inner switch always returns */
            case IEMMODE_64BIT: /* treated as 32-bit operand size */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - inner switch always returns */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2764
2765
2766/**
2767 * @opcode 0x70
2768 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    /* JO rel8: jump if OF=1; otherwise fall through to the next instruction. */
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2785
2786
2787/**
2788 * @opcode 0x71
2789 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    /* JNO rel8: jump if OF=0 (branches are swapped relative to JO). */
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2806
2807/**
2808 * @opcode 0x72
2809 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    /* JC/JB/JNAE rel8: jump if CF=1. */
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2826
2827
2828/**
2829 * @opcode 0x73
2830 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    /* JNC/JNB/JAE rel8: jump if CF=0 (branches swapped relative to JC). */
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2847
2848
2849/**
2850 * @opcode 0x74
2851 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    /* JE/JZ rel8: jump if ZF=1. */
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2868
2869
2870/**
2871 * @opcode 0x75
2872 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    /* JNE/JNZ rel8: jump if ZF=0 (branches swapped relative to JE). */
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2889
2890
2891/**
2892 * @opcode 0x76
2893 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    /* JBE/JNA rel8: jump if CF=1 or ZF=1. */
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2910
2911
2912/**
2913 * @opcode 0x77
2914 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    /* JA/JNBE rel8: jump if CF=0 and ZF=0 (branches swapped relative to JBE). */
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2931
2932
2933/**
2934 * @opcode 0x78
2935 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    /* JS rel8: jump if SF=1. */
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2952
2953
2954/**
2955 * @opcode 0x79
2956 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    /* JNS rel8: jump if SF=0 (branches swapped relative to JS). */
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2973
2974
2975/**
2976 * @opcode 0x7a
2977 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    /* JP/JPE rel8: jump if PF=1. */
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2994
2995
2996/**
2997 * @opcode 0x7b
2998 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    /* JNP/JPO rel8: jump if PF=0 (branches swapped relative to JP). */
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3015
3016
3017/**
3018 * @opcode 0x7c
3019 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    /* JL/JNGE rel8: jump if SF != OF. */
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3036
3037
3038/**
3039 * @opcode 0x7d
3040 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* JNL/JGE rel8: jump if SF == OF (branches swapped relative to JL). */
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3057
3058
3059/**
3060 * @opcode 0x7e
3061 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    /* JLE/JNG rel8: jump if ZF=1 or SF != OF. */
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3078
3079
3080/**
3081 * @opcode 0x7f
3082 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* JG/JNLE rel8: jump if ZF=0 and SF == OF (branches swapped relative to JLE). */
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3099
3100
3101/**
3102 * @opcode 0x80
3103 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /* Group 1, Eb,Ib forms (add/or/adc/sbb/and/sub/xor/cmp on a byte operand
       with imm8).  The actual operation is selected by the ModR/M reg field
       via the g_apIemImplGrp1 table. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* CMP has no locked variant (pfnLockedU8 is NULL) and only reads the
           destination; everything else maps it read-write. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Effective address first; 1 immediate byte still follows. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        /* A LOCK prefix is only legal for the ops that have a locked worker. */
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3172
3173
/**
 * @opcode 0x81
 *
 * Immediate group 1, full-size form: add/or/adc/sbb/and/sub/xor/cmp Ev,Iz.
 * The operation is selected by ModRM.reg via g_apIemImplGrp1.  The immediate
 * is operand sized (word/dword); with a 64-bit effective operand size it is a
 * dword that gets sign-extended to 64 bits (see the S32_SX_U64 fetches).
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Pick the mnemonic (logging/statistics) according to ModRM.reg. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK with register operand: \#UD. */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                /* CMP only reads the destination (no locked worker). */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* ModRM encoding precedes the immediate: effective address first,
                   then the 2-byte immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes clear bits 63:32 in 64-bit mode; needed
                   explicitly here because the worker wrote through a reference. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* Iz in 64-bit mode is a dword immediate, sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* NOTE(review): the assign is done after the decoding-done helper
                   here, whereas the 16/32-bit paths do it before - looks harmless
                   but inconsistent; confirm ordering is irrelevant for the recorder. */
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
3363
3364
/**
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup og_groups
 *
 * Undocumented alias of opcode 0x80 (immediate group 1, Eb,Ib); invalid
 * (\#UD) in 64-bit mode, otherwise forwarded to the 0x80 handler.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
3375
3376
/**
 * @opcode 0x83
 *
 * Immediate group 1 with a sign-extended byte immediate:
 * add/or/adc/sbb/and/sub/xor/cmp Ev,Ib.  The operation is selected by
 * ModRM.reg via g_apIemImplGrp1; the byte immediate is sign-extended to the
 * effective operand size (note the (int8_t) casts below).
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Pick the mnemonic (logging/statistics) according to ModRM.reg. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK with register operand: \#UD. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* Writes through a 32-bit reference don't clear bits 63:32; do it here. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* CMP only reads the destination (no locked worker); all sizes share
           the same NULL/non-NULL pattern, so checking the U16 worker suffices. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first, the 1-byte immediate follows the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
3561
3562
/**
 * @opcode 0x84
 *
 * TEST Eb,Gb - byte AND without writing the result; only flags are updated.
 * Delegates to the generic rm/r8 binary-operator helper with the TEST worker
 * table.  AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
3572
3573
/**
 * @opcode 0x85
 *
 * TEST Ev,Gv - word/dword/qword AND without writing the result; only flags
 * are updated.  Delegates to the generic rm/rv binary-operator helper with
 * the TEST worker table.  AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
3583
3584
/**
 * @opcode 0x86
 *
 * XCHG Eb,Gb - exchange a byte register with a byte register or memory
 * operand.  The register form is done with two fetches and two stores; the
 * memory form maps the location read-write and hands both pointers to the
 * iemAImpl_xchg_u8 worker.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Fetch both, then cross-store: uTmp1 = Gb, uTmp2 = Eb. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        /* The worker swaps *pu8Mem and *pu8Reg in place. */
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3634
3635
/**
 * @opcode 0x87
 *
 * XCHG Ev,Gv - exchange a general register with a register or memory operand
 * at the effective operand size.  The register form uses two fetches and two
 * cross-stores; the memory form maps the location read-write and calls the
 * size-specific iemAImpl_xchg_uNN worker on both pointers.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                /* No explicit high-half clearing here; presumably
                   IEM_MC_STORE_GREG_U32 clears bits 63:32 itself (unlike the
                   by-reference memory form below) - verify against the macro. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* Worker wrote through the reference, so clear bits 63:32 explicitly. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3759
3760
/**
 * @opcode 0x88
 *
 * MOV Eb,Gb - store a byte register into a byte register or memory location.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* no immediate follows (0) */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
3802
3803
/**
 * @opcode 0x89
 *
 * MOV Ev,Gv - store a general register into a register or memory location at
 * the effective operand size (16/32/64 bits).
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3895
3896
/**
 * @opcode 0x8a
 *
 * MOV Gb,Eb - load a byte register from a byte register or memory location.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3936
3937
/**
 * @opcode 0x8b
 *
 * MOV Gv,Ev - load a general register from a register or memory location at
 * the effective operand size (16/32/64 bits).
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
4029
4030
4031/**
4032 * opcode 0x63
4033 * @todo Table fixme
4034 */
4035FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
4036{
4037 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
4038 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
4039 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
4040 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
4041 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
4042}
4043
4044
/**
 * @opcode 0x8c
 *
 * MOV Ev,Sw - store a segment register selector into a general register or
 * memory location.  The memory form always stores 16 bits regardless of the
 * operand-size prefix; the register form honours the operand size and
 * zero-extends (see the SREG_ZX fetches).
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero-extended selector */
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero-extended selector */
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4120
4121
4122
4123
/**
 * @opcode 0x8d
 *
 * LEA Gv,M - load the effective address of the memory operand into a general
 * register; no memory access is performed.  The register form of ModRM is
 * invalid (\#UD).  For 16/32-bit operand sizes the address is truncated to
 * the operand width.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate to 16 bits */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate to 32 bits */
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7); /* all operand sizes handled above */
}
4172
4173
/**
 * @opcode 0x8e
 *
 * mov Sw,Ev - loads a segment register from a 16-bit register or memory
 * operand.  CS is not a valid destination; the actual descriptor loading
 * and fault checking is done by the iemCImpl_load_SReg C implementation.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4230
4231
/** Opcode 0x8f /0.
 *
 * pop Ev - pops a word/dword/qword off the stack into a register or memory
 * operand.  The register form shares iemOpCommonPopGReg; the memory form is
 * implemented interpreter-style below because RSP must be advanced before
 * the effective address is calculated.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        /* Last argument (2/4/8) matches the operand size, i.e. the amount
           rSP is advanced by before the EA calculation (see note above). */
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp; /* Work on a copy; only committed on overall success below. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit the new stack pointer and advance RIP only if both the pop
           and the memory store succeeded. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4325
4326
4327/**
4328 * @opcode 0x8f
4329 */
4330FNIEMOP_DEF(iemOp_Grp1A__xop)
4331{
4332 /*
4333 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
4334 * three byte VEX prefix, except that the mmmmm field cannot have the values
4335 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
4336 */
4337 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4338 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
4339 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
4340
4341 IEMOP_MNEMONIC(xop, "xop");
4342 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
4343 {
4344 /** @todo Test when exctly the XOP conformance checks kick in during
4345 * instruction decoding and fetching (using \#PF). */
4346 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
4347 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
4348 if ( ( pVCpu->iem.s.fPrefixes
4349 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
4350 == 0)
4351 {
4352 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
4353 if ((bXop2 & 0x80 /* XOP.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
4354 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
4355 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
4356 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
4357 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
4358 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
4359 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
4360 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
4361
4362 /** @todo XOP: Just use new tables and decoders. */
4363 switch (bRm & 0x1f)
4364 {
4365 case 8: /* xop opcode map 8. */
4366 IEMOP_BITCH_ABOUT_STUB();
4367 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4368
4369 case 9: /* xop opcode map 9. */
4370 IEMOP_BITCH_ABOUT_STUB();
4371 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4372
4373 case 10: /* xop opcode map 10. */
4374 IEMOP_BITCH_ABOUT_STUB();
4375 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4376
4377 default:
4378 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
4379 return IEMOP_RAISE_INVALID_OPCODE();
4380 }
4381 }
4382 else
4383 Log(("XOP: Invalid prefix mix!\n"));
4384 }
4385 else
4386 Log(("XOP: XOP support disabled!\n"));
4387 return IEMOP_RAISE_INVALID_OPCODE();
4388}
4389
4390
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Swaps the general register selected by @a iReg (after OR-ing in REX.B)
 * with rAX/EAX/AX according to the effective operand size.  Used by the
 * one-byte 0x90..0x97 xchg opcodes.
 *
 * @param   iReg    The low 3 bits of the destination register index; REX.B
 *                  is merged in below.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4440
4441
/**
 * @opcode 0x90
 *
 * Plain nop, but also:
 *  - with REX.B it is 'xchg r8,rAX' (0x90 is xchg rAX,rAX otherwise),
 *  - with an F3 (LOCK-group tracked as IEM_OP_PRF_LOCK here) prefix it is
 *    'pause', which may be intercepted under nested VMX/SVM.
 */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        IEMOP_MNEMONIC(pause, "pause");
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
        /* Nested hw-virt may need to intercept PAUSE (PAUSE-loop exiting etc.). */
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVmx)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmx_pause);
#endif
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSvm)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_svm_pause);
#endif
    }
    else
        IEMOP_MNEMONIC(nop, "nop");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4473
4474
/**
 * @opcode 0x91
 * xchg rCX,rAX - swaps rCX/eCX/CX with rAX/eAX/AX per effective operand size.
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
4483
4484
/**
 * @opcode 0x92
 * xchg rDX,rAX - swaps rDX/eDX/DX with rAX/eAX/AX per effective operand size.
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
4493
4494
/**
 * @opcode 0x93
 * xchg rBX,rAX - swaps rBX/eBX/BX with rAX/eAX/AX per effective operand size.
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
4503
4504
4505/**
4506 * @opcode 0x94
4507 */
4508FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4509{
4510 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4511 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4512}
4513
4514
/**
 * @opcode 0x95
 * xchg rBP,rAX - swaps rBP/eBP/BP with rAX/eAX/AX per effective operand size.
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
4523
4524
/**
 * @opcode 0x96
 * xchg rSI,rAX - swaps rSI/eSI/SI with rAX/eAX/AX per effective operand size.
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
4533
4534
/**
 * @opcode 0x97
 * xchg rDI,rAX - swaps rDI/eDI/DI with rAX/eAX/AX per effective operand size.
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
4543
4544
/**
 * @opcode 0x98
 *
 * cbw/cwde/cdqe - sign-extends AL->AX, AX->EAX or EAX->RAX depending on the
 * effective operand size.  Implemented by testing the sign bit and OR-ing in
 * (negative) or AND-ing away (non-negative) the upper half.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {     /* AL sign bit */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {    /* AX sign bit */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {    /* EAX sign bit */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4592
4593
/**
 * @opcode 0x99
 *
 * cwd/cdq/cqo - sign-extends AX/EAX/RAX into DX/EDX/RDX, i.e. rDX is set to
 * all ones when rAX is negative and to zero otherwise.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {    /* AX sign bit */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {    /* EAX sign bit */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {    /* RAX sign bit */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4641
4642
/**
 * @opcode 0x9a
 *
 * call Ap - direct far call with an immediate seg:off pointer.  Invalid in
 * 64-bit mode (IEMOP_HLP_NO_64BIT).  The heavy lifting (stack pushes, mode
 * and descriptor checks) is deferred to iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
4661
4662
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending x87 exceptions / device-not-available conditions and
 * otherwise does nothing.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4676
4677
/**
 * @opcode 0x9c
 *
 * pushf Fv - pushes the flags register; defaults to 64-bit operand size in
 * long mode.  Privilege/VM86 checks live in iemCImpl_pushf.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
4688
4689
/**
 * @opcode 0x9d
 *
 * popf Fv - pops the flags register; defaults to 64-bit operand size in
 * long mode.  IOPL/VM86 flag-write rules are handled by iemCImpl_popf.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
4700
4701
/**
 * @opcode 0x9e
 *
 * sahf - stores AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF; the
 * reserved bit 1 is forced to one).  Raises \#UD in 64-bit mode unless the
 * CPU reports the LAHF/SAHF capability.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the arithmetic flags from AH, merge into the untouched
       upper 24 bits of EFLAGS, and force the always-one bit 1. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4726
4727
/**
 * @opcode 0x9f
 *
 * lahf - loads the low byte of EFLAGS into AH.  Raises \#UD in 64-bit mode
 * unless the CPU reports the LAHF/SAHF capability.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4746
4747
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
 * prefixes. Will return on failures.
 *
 * The immediate offset width follows the effective ADDRESS size (16/32/64
 * bits), zero-extended to 64 bits.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
4772
/**
 * @opcode 0xa0
 *
 * mov AL,Ob - loads AL from the byte at segment:moffs, where moffs is an
 * immediate offset whose width follows the effective address size.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4796
4797
/**
 * @opcode 0xa1
 *
 * mov rAX,Ov - loads AX/EAX/RAX from segment:moffs according to the
 * effective operand size; the moffs width follows the effective address size.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4845
4846
/**
 * @opcode 0xa2
 *
 * mov Ob,AL - stores AL to the byte at segment:moffs, where moffs is an
 * immediate offset whose width follows the effective address size.
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4870
4871
/**
 * @opcode 0xa3
 *
 * mov Ov,rAX - stores AX/EAX/RAX to segment:moffs according to the effective
 * operand size; the moffs width follows the effective address size.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4919
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-rep movs iteration: load ValBits from DS(or override):rSI,
 * store to ES:rDI, then advance or retreat both index registers by the
 * element size depending on EFLAGS.DF.  AddrBits selects which width of
 * rSI/rDI is used (zero-extended to 64 bits for the access).
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
4938
/**
 * @opcode 0xa4
 *
 * movsb Xb,Yb - byte string move from DS(or override):rSI to ES:rDI.
 * With a rep prefix the whole loop is deferred to a C implementation
 * selected by address size; otherwise a single iteration is emitted via
 * IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4974
4975
/**
 * @opcode 0xa5
 *
 * movsw/movsd/movsq Xv,Yv - word/dword/qword string move.  Rep-prefixed
 * forms are deferred to per-opsize/addrsize C implementations; the non-rep
 * forms share IEM_MOVS_CASE with movsb.  Note that a 16-bit address size
 * with 64-bit operand size cannot be encoded (asserted below).
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every path in the inner switch returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5060
5061#undef IEM_MOVS_CASE
5062
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-rep cmps iteration: load ValBits from DS(or override):rSI
 * and from ES:rDI, compare via the cmp assembly worker (updating EFLAGS,
 * operands untouched), then advance or retreat both index registers by the
 * element size depending on EFLAGS.DF.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
/**
 * @opcode 0xa6
 *
 * cmpsb Xb,Yb - byte string compare of DS(or override):rSI against ES:rDI.
 * repe/repne forms are deferred to C implementations selected by address
 * size; the plain form emits one iteration via IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
5137
5138
5139/**
5140 * @opcode 0xa7
5141 */
5142FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
5143{
5144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5145
5146 /*
5147 * Use the C implementation if a repeat prefix is encountered.
5148 */
5149 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5150 {
5151 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
5152 switch (pVCpu->iem.s.enmEffOpSize)
5153 {
5154 case IEMMODE_16BIT:
5155 switch (pVCpu->iem.s.enmEffAddrMode)
5156 {
5157 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
5158 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
5159 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
5160 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5161 }
5162 break;
5163 case IEMMODE_32BIT:
5164 switch (pVCpu->iem.s.enmEffAddrMode)
5165 {
5166 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
5167 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
5168 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
5169 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5170 }
5171 case IEMMODE_64BIT:
5172 switch (pVCpu->iem.s.enmEffAddrMode)
5173 {
5174 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
5175 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5176 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5177 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5178 }
5179 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5180 }
5181 }
5182
5183 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5184 {
5185 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
5186 switch (pVCpu->iem.s.enmEffOpSize)
5187 {
5188 case IEMMODE_16BIT:
5189 switch (pVCpu->iem.s.enmEffAddrMode)
5190 {
5191 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
5192 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
5193 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
5194 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5195 }
5196 break;
5197 case IEMMODE_32BIT:
5198 switch (pVCpu->iem.s.enmEffAddrMode)
5199 {
5200 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
5201 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
5202 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
5203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5204 }
5205 case IEMMODE_64BIT:
5206 switch (pVCpu->iem.s.enmEffAddrMode)
5207 {
5208 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
5209 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5210 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5211 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5212 }
5213 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5214 }
5215 }
5216
5217 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
5218
5219 /*
5220 * Annoying double switch here.
5221 * Using ugly macro for implementing the cases, sharing it with cmpsb.
5222 */
5223 switch (pVCpu->iem.s.enmEffOpSize)
5224 {
5225 case IEMMODE_16BIT:
5226 switch (pVCpu->iem.s.enmEffAddrMode)
5227 {
5228 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
5229 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
5230 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
5231 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5232 }
5233 break;
5234
5235 case IEMMODE_32BIT:
5236 switch (pVCpu->iem.s.enmEffAddrMode)
5237 {
5238 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
5239 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
5240 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
5241 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5242 }
5243 break;
5244
5245 case IEMMODE_64BIT:
5246 switch (pVCpu->iem.s.enmEffAddrMode)
5247 {
5248 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5249 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
5250 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
5251 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5252 }
5253 break;
5254 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5255 }
5256 return VINF_SUCCESS;
5257
5258}
5259
5260#undef IEM_CMPS_CASE
5261
/**
 * @opcode 0xa8
 *
 * TEST AL,Ib - ANDs AL with an immediate byte, updating EFLAGS without
 * storing the result.  Delegates to the shared AL,Ib binary-operator helper
 * with the 'test' implementation table.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
5271
5272
/**
 * @opcode 0xa9
 *
 * TEST rAX,Iz - ANDs the operand-sized accumulator with an immediate,
 * updating EFLAGS without storing the result.  Delegates to the shared
 * rAX,Iz binary-operator helper with the 'test' implementation table.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
5282
5283
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX for one
 * non-repeated STOS iteration: stores the low ValBits of rAX at ES:[rDI]
 * and then subtracts/adds ValBits/8 from/to rDI depending on EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
5299
/**
 * @opcode 0xaa
 *
 * STOSB - stores AL at ES:[rDI] and steps rDI by one byte according to
 * EFLAGS.DF.  A REP/REPNE prefix (both treated alike for STOS) defers the
 * whole loop to the C implementation.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)) /* REPNE behaves like REP here. */
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5335
5336
/**
 * @opcode 0xab
 *
 * STOSW/STOSD/STOSQ - stores the operand-sized accumulator at ES:[rDI],
 * stepping rDI by EFLAGS.DF.  A REP/REPNE prefix defers the whole loop to
 * the C implementation.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)) /* REPNE behaves like REP here. */
    {
        IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): missing 'break' before the next case; the fall-through
                   is unreachable since every case above returns, but it is inconsistent
                   with the 16-bit case. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5421
5422#undef IEM_STOS_CASE
5423
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv for one
 * non-repeated LODS iteration: loads ValBits from iEffSeg:[rSI] (segment
 * overridable) into the accumulator and steps rSI by EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5439
/**
 * @opcode 0xac
 *
 * LODSB - loads the byte at iEffSeg:[rSI] into AL and steps rSI by
 * EFLAGS.DF.  A REP/REPNE prefix defers the whole loop to the C
 * implementation.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)) /* REPNE behaves like REP here. */
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5475
5476
/**
 * @opcode 0xad
 *
 * LODSW/LODSD/LODSQ - loads the operand-sized value at iEffSeg:[rSI] into
 * the accumulator and steps rSI by EFLAGS.DF.  A REP/REPNE prefix defers
 * the whole loop to the C implementation.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)) /* REPNE behaves like REP here. */
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): missing 'break' before the next case; the fall-through
                   is unreachable since every case above returns. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5561
5562#undef IEM_LODS_CASE
5563
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv for one
 * non-repeated SCAS iteration: CMPs the accumulator against the value at
 * ES:[rDI] (EFLAGS only, accumulator unchanged since iemAImpl_cmp does not
 * write the destination) and steps rDI by EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5585
5586/**
5587 * @opcode 0xae
5588 */
5589FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5590{
5591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5592
5593 /*
5594 * Use the C implementation if a repeat prefix is encountered.
5595 */
5596 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5597 {
5598 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5599 switch (pVCpu->iem.s.enmEffAddrMode)
5600 {
5601 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5602 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5603 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5604 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5605 }
5606 }
5607 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5608 {
5609 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5610 switch (pVCpu->iem.s.enmEffAddrMode)
5611 {
5612 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5613 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5614 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5615 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5616 }
5617 }
5618 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5619
5620 /*
5621 * Sharing case implementation with stos[wdq] below.
5622 */
5623 switch (pVCpu->iem.s.enmEffAddrMode)
5624 {
5625 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5626 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5627 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5628 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5629 }
5630 return VINF_SUCCESS;
5631}
5632
5633
/**
 * @opcode 0xaf
 *
 * SCASW/SCASD/SCASQ - compares the operand-sized accumulator with the value
 * at ES:[rDI] (EFLAGS only) and steps rDI by EFLAGS.DF.  REPE/REPNE
 * prefixes defer the loop to the C implementation.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): missing 'break' - unreachable fall-through since all
                   cases above return. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
                    /* NOTE(review): the @todo above appears reversed - the 0x67 prefix in
                       64-bit mode selects 32-bit addressing, so it is 16-bit addressing
                       that cannot be encoded there (matches the 'cannot be encoded'
                       VERR_IEM_IPE_1 case below). */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): same unreachable fall-through as in the REPE path. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5751
5752#undef IEM_SCAS_CASE
5753
5754/**
5755 * Common 'mov r8, imm8' helper.
5756 */
5757FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
5758{
5759 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
5760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5761
5762 IEM_MC_BEGIN(0, 1);
5763 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
5764 IEM_MC_STORE_GREG_U8(iReg, u8Value);
5765 IEM_MC_ADVANCE_RIP();
5766 IEM_MC_END();
5767
5768 return VINF_SUCCESS;
5769}
5770
5771
5772/**
5773 * @opcode 0xb0
5774 */
5775FNIEMOP_DEF(iemOp_mov_AL_Ib)
5776{
5777 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
5778 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5779}
5780
5781
5782/**
5783 * @opcode 0xb1
5784 */
5785FNIEMOP_DEF(iemOp_CL_Ib)
5786{
5787 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
5788 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5789}
5790
5791
5792/**
5793 * @opcode 0xb2
5794 */
5795FNIEMOP_DEF(iemOp_DL_Ib)
5796{
5797 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
5798 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5799}
5800
5801
5802/**
5803 * @opcode 0xb3
5804 */
5805FNIEMOP_DEF(iemOp_BL_Ib)
5806{
5807 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
5808 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5809}
5810
5811
5812/**
5813 * @opcode 0xb4
5814 */
5815FNIEMOP_DEF(iemOp_mov_AH_Ib)
5816{
5817 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
5818 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5819}
5820
5821
5822/**
5823 * @opcode 0xb5
5824 */
5825FNIEMOP_DEF(iemOp_CH_Ib)
5826{
5827 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
5828 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5829}
5830
5831
5832/**
5833 * @opcode 0xb6
5834 */
5835FNIEMOP_DEF(iemOp_DH_Ib)
5836{
5837 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
5838 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5839}
5840
5841
5842/**
5843 * @opcode 0xb7
5844 */
5845FNIEMOP_DEF(iemOp_BH_Ib)
5846{
5847 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
5848 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5849}
5850
5851
5852/**
5853 * Common 'mov regX,immX' helper.
5854 */
5855FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5856{
5857 switch (pVCpu->iem.s.enmEffOpSize)
5858 {
5859 case IEMMODE_16BIT:
5860 {
5861 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5863
5864 IEM_MC_BEGIN(0, 1);
5865 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5866 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5867 IEM_MC_ADVANCE_RIP();
5868 IEM_MC_END();
5869 break;
5870 }
5871
5872 case IEMMODE_32BIT:
5873 {
5874 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5876
5877 IEM_MC_BEGIN(0, 1);
5878 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5879 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5880 IEM_MC_ADVANCE_RIP();
5881 IEM_MC_END();
5882 break;
5883 }
5884 case IEMMODE_64BIT:
5885 {
5886 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5888
5889 IEM_MC_BEGIN(0, 1);
5890 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5891 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5892 IEM_MC_ADVANCE_RIP();
5893 IEM_MC_END();
5894 break;
5895 }
5896 }
5897
5898 return VINF_SUCCESS;
5899}
5900
5901
5902/**
5903 * @opcode 0xb8
5904 */
5905FNIEMOP_DEF(iemOp_eAX_Iv)
5906{
5907 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
5908 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5909}
5910
5911
5912/**
5913 * @opcode 0xb9
5914 */
5915FNIEMOP_DEF(iemOp_eCX_Iv)
5916{
5917 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
5918 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5919}
5920
5921
5922/**
5923 * @opcode 0xba
5924 */
5925FNIEMOP_DEF(iemOp_eDX_Iv)
5926{
5927 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
5928 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5929}
5930
5931
5932/**
5933 * @opcode 0xbb
5934 */
5935FNIEMOP_DEF(iemOp_eBX_Iv)
5936{
5937 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
5938 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5939}
5940
5941
5942/**
5943 * @opcode 0xbc
5944 */
5945FNIEMOP_DEF(iemOp_eSP_Iv)
5946{
5947 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
5948 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5949}
5950
5951
5952/**
5953 * @opcode 0xbd
5954 */
5955FNIEMOP_DEF(iemOp_eBP_Iv)
5956{
5957 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5958 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5959}
5960
5961
5962/**
5963 * @opcode 0xbe
5964 */
5965FNIEMOP_DEF(iemOp_eSI_Iv)
5966{
5967 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
5968 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5969}
5970
5971
5972/**
5973 * @opcode 0xbf
5974 */
5975FNIEMOP_DEF(iemOp_eDI_Iv)
5976{
5977 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
5978 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5979}
5980
5981
/**
 * @opcode 0xc0
 *
 * Group 2 with byte operand and immediate shift count:
 * ROL/ROR/RCL/RCR/SHL/SHR/SAR Eb,Ib.  The ModR/M reg field selects the
 * operation; /6 is undefined and raises \#UD.  Requires 80186 or later.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF); /* OF/AF undefined for some counts */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The trailing 1 is presumably the number of immediate bytes after the
           ModR/M (the Ib) - needed for RIP-relative addressing; confirm. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6043
6044
/**
 * @opcode 0xc1
 *
 * Group 2 with operand-sized destination and immediate shift count:
 * ROL/ROR/RCL/RCR/SHL/SHR/SAR Ev,Ib.  The ModR/M reg field selects the
 * operation; /6 is undefined and raises \#UD.  Requires 80186 or later.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF); /* OF/AF undefined for some counts */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* writes through the ref don't auto-clear the high half */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate bytes after ModR/M (the Ib) */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6186
6187
/**
 * @opcode 0xc2
 *
 * Near return, popping @a u16Imm additional bytes off the stack after the
 * return address.  Fully deferred to iemCImpl_retn.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); /* byte count to release from the stack */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near RET defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6199
6200
/**
 * @opcode 0xc3
 *
 * Plain near return (no immediate stack adjustment); deferred to
 * iemCImpl_retn with a zero byte count.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    /* NOTE(review): the two helper macros below run in the opposite order of
       the 0xc2 variant; looks harmless since no further opcode bytes are
       fetched here — confirm against the macro definitions. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
6211
6212
6213/**
6214 * @opcode 0xc4
6215 */
6216FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
6217{
6218 /* The LDS instruction is invalid 64-bit mode. In legacy and
6219 compatability mode it is invalid with MOD=3.
6220 The use as a VEX prefix is made possible by assigning the inverted
6221 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
6222 outside of 64-bit mode. VEX is not available in real or v86 mode. */
6223 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6224 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
6225 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT) )
6226 {
6227 IEMOP_MNEMONIC(vex3_prefix, "vex3");
6228 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6229 {
6230 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
6231 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
6232 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
6233 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6234 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
6235 if ((bVex2 & 0x80 /* VEX.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
6236 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6237 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6238 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
6239 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
6240 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
6241 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
6242 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
6243
6244 switch (bRm & 0x1f)
6245 {
6246 case 1: /* 0x0f lead opcode byte. */
6247#ifdef IEM_WITH_VEX
6248 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6249#else
6250 IEMOP_BITCH_ABOUT_STUB();
6251 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6252#endif
6253
6254 case 2: /* 0x0f 0x38 lead opcode bytes. */
6255#ifdef IEM_WITH_VEX
6256 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6257#else
6258 IEMOP_BITCH_ABOUT_STUB();
6259 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6260#endif
6261
6262 case 3: /* 0x0f 0x3a lead opcode bytes. */
6263#ifdef IEM_WITH_VEX
6264 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6265#else
6266 IEMOP_BITCH_ABOUT_STUB();
6267 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6268#endif
6269
6270 default:
6271 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6272 return IEMOP_RAISE_INVALID_OPCODE();
6273 }
6274 }
6275 Log(("VEX3: AVX support disabled!\n"));
6276 return IEMOP_RAISE_INVALID_OPCODE();
6277 }
6278
6279 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
6280 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
6281}
6282
6283
/**
 * @opcode 0xc5
 *
 * Dual-role byte: LDS Gv,Mp in contexts where that encoding is valid,
 * otherwise the 2-byte VEX prefix (map 1 only).
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8; /* inverted REX.R */
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;       /* VEX.vvvv (inverted) */
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;          /* VEX.L */
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;               /* VEX.pp implied prefix */

#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
6327
6328
/**
 * @opcode 0xc6
 *
 * Group 11 byte variant.  Only /0 (mov Eb,Ib) is defined; the other
 * /reg encodings raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* The effective address must be calculated before the immediate is
           fetched (the ModR/M displacement precedes Ib in the stream). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6363
6364
/**
 * @opcode 0xc7
 *
 * Group 11 word/dword/qword variant.  Only /0 (mov Ev,Iz) is defined; the
 * other /reg encodings raise \#UD.  The 64-bit form sign-extends a 32-bit
 * immediate.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* Iz is at most 32 bits; sign-extended to 64 bits here. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Effective address first; the 2nd arg is the number of
                   immediate bytes still to come after the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* Iz stays 4 bytes in 64-bit mode */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6452
6453
6454
6455
/**
 * @opcode 0xc8
 *
 * ENTER: allocate a stack frame of @a cbFrame bytes with nesting level
 * @a u8NestingLevel.  Introduced on the 80186; deferred to iemCImpl_enter.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
6469
6470
/**
 * @opcode 0xc9
 *
 * LEAVE: tear down the current stack frame (SP := BP, pop BP).
 * Introduced on the 80186; deferred to iemCImpl_leave.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
6482
6483
/**
 * @opcode 0xca
 *
 * Far return, popping @a u16Imm additional bytes off the stack.
 * Deferred to iemCImpl_retf.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6495
6496
/**
 * @opcode 0xcb
 *
 * Plain far return (no immediate stack adjustment); deferred to
 * iemCImpl_retf with a zero byte count.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
6507
6508
6509/**
6510 * @opcode 0xcc
6511 */
6512FNIEMOP_DEF(iemOp_int3)
6513{
6514 IEMOP_MNEMONIC(int3, "int3");
6515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6516 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
6517}
6518
6519
/**
 * @opcode 0xcd
 *
 * INT n: software interrupt with vector @a u8Int, via iemCImpl_int with
 * the INTn flavor.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, IEMINT_INTN);
}
6530
6531
/**
 * @opcode 0xce
 *
 * INTO: invokes the \#OF software interrupt path (iemCImpl_int with the
 * INTO flavor).  Whether the trap is actually taken when EFLAGS.OF is
 * clear is decided inside the C implementation.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,  /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(IEMINT,    enmInt, /*=*/ IEMINT_INTO, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, enmInt);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6547
6548
6549/**
6550 * @opcode 0xcf
6551 */
6552FNIEMOP_DEF(iemOp_iret)
6553{
6554 IEMOP_MNEMONIC(iret, "iret");
6555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6556 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
6557}
6558
6559
/**
 * @opcode 0xd0
 *
 * Group 2: rotate/shift r/m8 by an implicit count of 1.  The ModR/M /reg
 * field selects the operation; /6 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the byte read-write, operate in place, then commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6617
6618
6619
/**
 * @opcode 0xd1
 *
 * Group 2: rotate/shift r/m16, r/m32 or r/m64 by an implicit count of 1.
 * The ModR/M /reg field selects the operation; /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the operand read-write, operate in place, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6753
6754
/**
 * @opcode 0xd2
 *
 * Group 2: rotate/shift r/m8 by the count held in CL.  The ModR/M /reg
 * field selects the operation; /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the byte read-write, operate in place, then commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6814
6815
/**
 * @opcode 0xd3
 *
 * Group 2: rotate/shift r/m16, r/m32 or r/m64 by the count held in CL.
 * The ModR/M /reg field selects the operation; /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the operand read-write, operate in place, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6955
/**
 * @opcode 0xd4
 *
 * AAM: ASCII adjust AX after multiply; the immediate is the divisor
 * (0x0a in the canonical encoding).  A zero immediate raises \#DE before
 * deferring to iemCImpl_aam.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm) /* division by zero -> #DE, checked at decode time */
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
6969
6970
/**
 * @opcode 0xd5
 *
 * AAD: ASCII adjust AX before division; the immediate is the multiplier
 * (0x0a in the canonical encoding).  Deferred to iemCImpl_aad; no divide
 * is involved, so a zero immediate needs no special casing here.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
6982
6983
/**
 * @opcode 0xd6
 *
 * SALC (undocumented): set AL to 0xff if EFLAGS.CF is set, else to 0x00.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7003
7004
/**
 * @opcode 0xd7
 *
 * XLAT: AL := [iEffSeg : xBX + zero-extended AL], with the address width
 * chosen by the effective address size.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* zero-extend AL into the index */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            /* NOTE(review): the MEM16/MEM32 fetch variants presumably apply
               16/32-bit effective-address wrapping — confirm against the
               IEM_MC_FETCH_MEM16_U8 macro definition. */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7053
7054
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * STn is selected by the low three ModR/M bits of @a bRm.  If either source
 * register is tagged empty, the stack-underflow path is taken instead of
 * calling the assembly worker.
 *
 * @param   bRm         The ModR/M byte; R/M selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,     FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,          2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result goes to ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7085
7086
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * STn is selected by the low three ModR/M bits of @a bRm.  Only FSW is
 * updated; no register is written.  If either source register is tagged
 * empty, the stack-underflow path is taken instead.
 *
 * @param   bRm         The ModR/M byte; R/M selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7117
7118
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the stack when done.
 *
 * @param bRm       The ModR/M byte; the low 3 bits select ST(i) as the 2nd operand.
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX: no result register to fill; the pop still happens. */
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7149
7150
/** Opcode 0xd8 11/0.  FADD ST0,ST(i): adds ST(i) to ST0, result in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
7157
7158
/** Opcode 0xd8 11/1.  FMUL ST0,ST(i): multiplies ST0 by ST(i), result in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
7165
7166
/** Opcode 0xd8 11/2.  FCOM ST0,ST(i): compares ST0 with ST(i), sets C0/C2/C3. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
7173
7174
/** Opcode 0xd8 11/3.  FCOMP ST0,ST(i): like FCOM but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
7181
7182
/** Opcode 0xd8 11/4.  FSUB ST0,ST(i): ST0 = ST0 - ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
7189
7190
/** Opcode 0xd8 11/5.  FSUBR ST0,ST(i): ST0 = ST(i) - ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
7197
7198
/** Opcode 0xd8 11/6.  FDIV ST0,ST(i): ST0 = ST0 / ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
7205
7206
/** Opcode 0xd8 11/7.  FDIVR ST0,ST(i): ST0 = ST(i) / ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
7213
7214
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * @param bRm       The ModR/M byte, used to compute the effective address of
 *                  the 32-bit real memory operand.
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    /* Effective address must be decoded before the done-decoding check. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7250
7251
/** Opcode 0xd8 !11/0.  FADD ST0,m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
7258
7259
/** Opcode 0xd8 !11/1.  FMUL ST0,m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
7266
7267
/** Opcode 0xd8 !11/2.  FCOM ST0,m32real: compares ST0 with a 32-bit real from
 *  memory; only FSW is updated (no data register written). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* The *_WITH_MEM_OP variants also record FPUDP/FPUDS for the operand. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7300
7301
/** Opcode 0xd8 !11/3.  FCOMP ST0,m32real: like FCOM m32real but pops the
 *  stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7334
7335
/** Opcode 0xd8 !11/4.  FSUB ST0,m32real: ST0 = ST0 - m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
7342
7343
/** Opcode 0xd8 !11/5.  FSUBR ST0,m32real: ST0 = m32real - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
7350
7351
/** Opcode 0xd8 !11/6.  FDIV ST0,m32real: ST0 = ST0 / m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
7358
7359
/** Opcode 0xd8 !11/7.  FDIVR ST0,m32real: ST0 = m32real / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7366
7367
/**
 * @opcode 0xd8
 *
 * First x87 escape byte.  Dispatches on ModR/M: mod=3 selects the
 * register-operand forms (ST0 op ST(i)), anything else the m32real forms,
 * with the reg field picking the operation in both cases.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) bits: low 3 bits of the escape byte + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7407
7408
/** Opcode 0xd9 /0 mem32real
 * FLD m32real: converts the 32-bit real to 80-bit and pushes it onto the stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 is the register that becomes ST0 after the push; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7441
7442
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real: stores ST0 to memory as a 32-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked the CPU stores the indefinite QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7477
7478
/** Opcode 0xd9 !11/3
 * FSTP m32real: stores ST0 to memory as a 32-bit real and pops the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked the CPU stores the indefinite QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7513
7514
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte: loads the FPU environment (control/status/tag words,
 * instruction and data pointers) from memory; 14 or 28 bytes depending on the
 * effective operand size.  The heavy lifting is done in iemCImpl_fldenv.
 * NOTE(review): mnemonic string says "m14/28byte" while fnstenv uses
 * "m14/m28byte" — cosmetic inconsistency only. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7532
7533
7534/** Opcode 0xd9 !11/5 */
7535FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7536{
7537 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7538 IEM_MC_BEGIN(1, 1);
7539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7540 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7541 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7543 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7544 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7545 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7546 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7547 IEM_MC_END();
7548 return VINF_SUCCESS;
7549}
7550
7551
7552/** Opcode 0xd9 !11/6 */
7553FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
7554{
7555 IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
7556 IEM_MC_BEGIN(3, 0);
7557 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
7558 IEM_MC_ARG(uint8_t, iEffSeg, 1);
7559 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
7560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7562 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7563 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7564 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7565 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
7566 IEM_MC_END();
7567 return VINF_SUCCESS;
7568}
7569
7570
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte: stores the FPU control word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7588
7589
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: does nothing except update the FPU opcode/instruction pointer. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7607
7608
/** Opcode 0xd9 11/0 stN
 * FLD ST(i): pushes a copy of ST(i) onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7636
7637
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i): exchanges the contents of ST0 and ST(i). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: ST(i)'s value (with C1 set) goes to ST0, ST0's to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* Underflow handling is complex enough to warrant a C implementation. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7668
7669
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i): copies ST0 to ST(i) and pops the stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: no copy needed, just pop. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST0 into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7716
7717
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7747
7748
/** Opcode 0xd9 0xe0.  FCHS: negates the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
7755
7756
/** Opcode 0xd9 0xe1.  FABS: clears the sign of ST0 (absolute value). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7763
7764
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW
 * (no data register is modified).
 *
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX: no result register to fill (flags-only operation). */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7793
7794
/** Opcode 0xd9 0xe4.  FTST: compares ST0 against +0.0, sets C0/C2/C3. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
7801
7802
/** Opcode 0xd9 0xe5.  FXAM: classifies the value in ST0 via C0/C1/C2/C3. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
7809
7810
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * @param pfnAImpl  Pointer to the instruction implementation (assembly);
 *                  produces the constant to push.
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 is the register that becomes ST0 after the push; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7838
7839
/** Opcode 0xd9 0xe8.  FLD1: pushes +1.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
7846
7847
/** Opcode 0xd9 0xe9.  FLDL2T: pushes log2(10) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
7854
7855
/** Opcode 0xd9 0xea.  FLDL2E: pushes log2(e) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
7862
/** Opcode 0xd9 0xeb.  FLDPI: pushes pi onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
7869
7870
/** Opcode 0xd9 0xec.  FLDLG2: pushes log10(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
7877
/** Opcode 0xd9 0xed.  FLDLN2: pushes ln(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
7884
7885
/** Opcode 0xd9 0xee.  FLDZ: pushes +0.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
7892
7893
/** Opcode 0xd9 0xf0.  F2XM1: ST0 = 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7900
7901
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * @param bRm       The ModR/M byte (callers may also pass a literal register
 *                  number); the low 3 bits select ST(i) as the destination.
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Note the operand order: ST(i) is the 1st operand, ST0 the 2nd. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7933
7934
/** Opcode 0xd9 0xf1.  FYL2X: ST1 = ST1 * log2(ST0), then pop (result lands in
 *  the new ST0); implemented via the stN_st0_pop worker with i = 1. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7941
7942
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7972
7973
/** Opcode 0xd9 0xf2.  FPTAN: ST0 = tan(ST0), then pushes 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
7980
7981
/** Opcode 0xd9 0xf3.  FPATAN: ST1 = arctan(ST1/ST0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
7988
7989
/** Opcode 0xd9 0xf4.  FXTRACT: splits ST0 into exponent (replaces ST0) and
 *  significand (pushed). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
7996
7997
/** Opcode 0xd9 0xf5.  FPREM1: IEEE partial remainder of ST0/ST1, result in
 *  ST0 (no pop). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
8004
8005
/** Opcode 0xd9 0xf6.
 * FDECSTP: decrements the FPU stack TOP pointer (rotates the stack); no tag
 * checks, no data movement. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8028
8029
/** Opcode 0xd9 0xf7.
 * FINCSTP: increments the FPU stack TOP pointer (rotates the stack); no tag
 * checks, no data movement. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8052
8053
/** Opcode 0xd9 0xf8.  FPREM: partial remainder (truncating, 8087-style) of
 *  ST0/ST1, result in ST0 (no pop). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
8060
8061
/** Opcode 0xd9 0xf9.  FYL2XP1: ST1 = ST1 * log2(ST0 + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
8068
8069
/** Opcode 0xd9 0xfa.  FSQRT: ST0 = sqrt(ST0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
8076
8077
/** Opcode 0xd9 0xfb.  FSINCOS: ST0 = sin(ST0), then pushes cos of the
 *  original value. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
8084
8085
/** Opcode 0xd9 0xfc.  FRNDINT: rounds ST0 to integer per the RC field. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
8092
8093
/** Opcode 0xd9 0xfd.  FSCALE: ST0 = ST0 * 2^trunc(ST1) (no pop). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
8100
8101
/** Opcode 0xd9 0xfe.  FSIN: ST0 = sin(ST0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
8108
8109
/** Opcode 0xd9 0xff.  FCOS: ST0 = cos(ST0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
8116
8117
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 register forms with ModR/M bytes 0xe0..0xff;
 * indexed by (bRm - 0xe0).  Unassigned encodings map to iemOp_Invalid. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
8154
8155
8156/**
8157 * @opcode 0xd9
8158 */
8159FNIEMOP_DEF(iemOp_EscF1)
8160{
8161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8162 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
8163
8164 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8165 {
8166 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8167 {
8168 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
8169 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
8170 case 2:
8171 if (bRm == 0xd0)
8172 return FNIEMOP_CALL(iemOp_fnop);
8173 return IEMOP_RAISE_INVALID_OPCODE();
8174 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
8175 case 4:
8176 case 5:
8177 case 6:
8178 case 7:
8179 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
8180 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
8181 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8182 }
8183 }
8184 else
8185 {
8186 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8187 {
8188 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
8189 case 1: return IEMOP_RAISE_INVALID_OPCODE();
8190 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
8191 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
8192 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
8193 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
8194 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
8195 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
8196 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8197 }
8198 }
8199}
8200
8201
/** Opcode 0xda 11/0.
 * FCMOVB - copy ST(i) into ST(0) when EFLAGS.CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty, else FPU stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        /* The copy only happens when CF is set; FOP/FPUIP update regardless. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8228
8229
/** Opcode 0xda 11/1.
 * FCMOVE - copy ST(i) into ST(0) when EFLAGS.ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty, else FPU stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        /* The copy only happens when ZF is set; FOP/FPUIP update regardless. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8256
8257
/** Opcode 0xda 11/2.
 * FCMOVBE - copy ST(i) into ST(0) when EFLAGS.CF or EFLAGS.ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty, else FPU stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        /* Copy when CF or ZF is set; FOP/FPUIP update regardless. */
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8284
8285
/** Opcode 0xda 11/3.
 * FCMOVU - copy ST(i) into ST(0) when EFLAGS.PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty, else FPU stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        /* The copy only happens when PF is set; FOP/FPUIP update regardless. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8312
8313
8314/**
8315 * Common worker for FPU instructions working on ST0 and STn, only affecting
8316 * flags, and popping twice when done.
8317 *
8318 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8319 */
8320FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8321{
8322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8323
8324 IEM_MC_BEGIN(3, 1);
8325 IEM_MC_LOCAL(uint16_t, u16Fsw);
8326 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8327 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8328 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8329
8330 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8331 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8332
8333 IEM_MC_PREPARE_FPU_USAGE();
8334 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
8335 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8336 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
8337 IEM_MC_ELSE()
8338 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
8339 IEM_MC_ENDIF();
8340 IEM_MC_ADVANCE_RIP();
8341
8342 IEM_MC_END();
8343 return VINF_SUCCESS;
8344}
8345
8346
/** Opcode 0xda 0xe9.
 * FUCOMPP - unordered compare ST0 with ST1, pop twice; defers to the
 * flags-only double-pop worker with iemAImpl_fucom_r80_by_r80. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8353
8354
8355/**
8356 * Common worker for FPU instructions working on ST0 and an m32i, and storing
8357 * the result in ST0.
8358 *
8359 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8360 */
8361FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
8362{
8363 IEM_MC_BEGIN(3, 3);
8364 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8365 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8366 IEM_MC_LOCAL(int32_t, i32Val2);
8367 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8368 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8369 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8370
8371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8373
8374 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8375 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8376 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8377
8378 IEM_MC_PREPARE_FPU_USAGE();
8379 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
8380 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
8381 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8382 IEM_MC_ELSE()
8383 IEM_MC_FPU_STACK_UNDERFLOW(0);
8384 IEM_MC_ENDIF();
8385 IEM_MC_ADVANCE_RIP();
8386
8387 IEM_MC_END();
8388 return VINF_SUCCESS;
8389}
8390
8391
/** Opcode 0xda !11/0.
 * fiadd m32i - defers to the common ST0-by-m32i worker with
 * iemAImpl_fiadd_r80_by_i32. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
8398
8399
/** Opcode 0xda !11/1.
 * fimul m32i - defers to the common ST0-by-m32i worker with
 * iemAImpl_fimul_r80_by_i32. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8406
8407
/** Opcode 0xda !11/2.
 * FICOM - compare ST0 with a 32-bit integer memory operand; only FSW is
 * updated, nothing is stored or popped. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2,  i32Val2,    2);

    /* Decode the address before committing to the instruction. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST0 must be non-empty; otherwise record a stack underflow with FDP set. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8440
8441
/** Opcode 0xda !11/3.
 * FICOMP - like FICOM (compare ST0 with m32i, FSW only) but pops ST0
 * afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2,  i32Val2,    2);

    /* Decode the address before committing to the instruction. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST0 must be non-empty; the stack is popped in both branches. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8474
8475
/** Opcode 0xda !11/4.
 * fisub m32i - defers to the common ST0-by-m32i worker with
 * iemAImpl_fisub_r80_by_i32. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
8482
8483
/** Opcode 0xda !11/5.
 * fisubr m32i - defers to the common ST0-by-m32i worker with
 * iemAImpl_fisubr_r80_by_i32. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
8490
8491
/** Opcode 0xda !11/6.
 * fidiv m32i - defers to the common ST0-by-m32i worker with
 * iemAImpl_fidiv_r80_by_i32. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
8498
8499
/** Opcode 0xda !11/7.
 * fidivr m32i - defers to the common ST0-by-m32i worker with
 * iemAImpl_fidivr_r80_by_i32. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8506
8507
8508/**
8509 * @opcode 0xda
8510 */
8511FNIEMOP_DEF(iemOp_EscF2)
8512{
8513 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8514 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
8515 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8516 {
8517 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8518 {
8519 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
8520 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
8521 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
8522 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
8523 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8524 case 5:
8525 if (bRm == 0xe9)
8526 return FNIEMOP_CALL(iemOp_fucompp);
8527 return IEMOP_RAISE_INVALID_OPCODE();
8528 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8529 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8530 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8531 }
8532 }
8533 else
8534 {
8535 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8536 {
8537 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
8538 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
8539 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
8540 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
8541 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
8542 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
8543 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
8544 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
8545 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8546 }
8547 }
8548}
8549
8550
/** Opcode 0xdb !11/0.
 * FILD - load a 32-bit integer memory operand and push it onto the FPU stack
 * (converted via iemAImpl_fild_i32_to_r80). */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,   i32Val, 1);

    /* Decode the address before committing to the instruction. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST7 before TOP is decremented; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8582
8583
/** Opcode 0xdb !11/1.
 * FISTTP - store ST0 to a 32-bit integer memory operand with truncation
 * (iemAImpl_fistt_r80_to_i32) and pop the stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so #PF etc. fire before FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with #IE masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8618
8619
/** Opcode 0xdb !11/2.
 * FIST - store ST0 to a 32-bit integer memory operand
 * (iemAImpl_fist_r80_to_i32) without popping. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so #PF etc. fire before FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with #IE masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8654
8655
/** Opcode 0xdb !11/3.
 * FISTP - like FIST (store ST0 as int32 via iemAImpl_fist_r80_to_i32) but
 * pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so #PF etc. fire before FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with #IE masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8690
8691
/** Opcode 0xdb !11/5.
 * FLD m80r - load an 80-bit real memory operand and push it onto the FPU
 * stack (via iemAImpl_fld_r80_from_r80). */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    /* Decode the address before committing to the instruction. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST7 before TOP is decremented; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8723
8724
/** Opcode 0xdb !11/7.
 * FSTP m80r - store ST0 to an 80-bit real memory operand
 * (iemAImpl_fst_r80_to_r80) and pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so #PF etc. fire before FPU work. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with #IE masked, store the real indefinite (negative QNaN). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8759
8760
/** Opcode 0xdb 11/0.
 * FCMOVNB - copy ST(i) into ST(0) when EFLAGS.CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty, else FPU stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        /* The copy only happens when CF is clear; FOP/FPUIP update regardless. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8787
8788
/** Opcode 0xdb 11/1.
 * FCMOVNE - copy ST(i) into ST(0) when EFLAGS.ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty, else FPU stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        /* The copy only happens when ZF is clear; FOP/FPUIP update regardless. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8815
8816
/** Opcode 0xdb 11/2.
 * FCMOVNBE - copy ST(i) into ST(0) when both EFLAGS.CF and EFLAGS.ZF are
 * clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty, else FPU stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        /* Copy when neither CF nor ZF is set; FOP/FPUIP update regardless. */
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8843
8844
/** Opcode 0xdb 11/3.
 * FCMOVNU - copy ST(i) into ST(0) when EFLAGS.PF is clear (not unordered).
 * NOTE(review): the identifier says "nnu" (double n) - presumably a historic
 * typo for "nu"; kept as-is since callers reference this name. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty, else FPU stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        /* The copy only happens when PF is clear; FOP/FPUIP update regardless. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8871
8872
/** Opcode 0xdb 0xe0.
 * FNENI - 8087 interrupt enable; ignored (no-op) apart from the possible
 * device-not-available exception. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8884
8885
/** Opcode 0xdb 0xe1.
 * FNDISI - 8087 interrupt disable; ignored (no-op) apart from the possible
 * device-not-available exception. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8897
8898
/** Opcode 0xdb 0xe2.
 * FNCLEX - clear the FPU exception bits in FSW without checking for pending
 * exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8913
8914
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitialize the FPU; deferred to the C implementation with
 * exception checking disabled (the no-wait form). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8922
8923
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287 "set protected mode"; ignored (no-op) apart from the
 * possible device-not-available exception. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8935
8936
/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL "return to real mode"; raises \#UD here since newer CPUs
 * treat the encoding as invalid (the no-op variant is compiled out). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8952
8953
/** Opcode 0xdb 11/5.
 * FUCOMI - unordered compare ST0 with ST(i), setting EFLAGS; deferred to the
 * shared fcomi/fucomi C implementation without popping. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
8960
8961
/** Opcode 0xdb 11/6.
 * FCOMI - compare ST0 with ST(i), setting EFLAGS; deferred to the shared
 * fcomi/fucomi C implementation without popping. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8968
8969
8970/**
8971 * @opcode 0xdb
8972 */
8973FNIEMOP_DEF(iemOp_EscF3)
8974{
8975 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8976 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
8977 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8978 {
8979 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8980 {
8981 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
8982 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
8983 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
8984 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
8985 case 4:
8986 switch (bRm)
8987 {
8988 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
8989 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
8990 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
8991 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
8992 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
8993 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
8994 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
8995 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
8996 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8997 }
8998 break;
8999 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
9000 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
9001 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9002 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9003 }
9004 }
9005 else
9006 {
9007 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9008 {
9009 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
9010 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
9011 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
9012 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
9013 case 4: return IEMOP_RAISE_INVALID_OPCODE();
9014 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
9015 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9016 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
9017 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9018 }
9019 }
9020}
9021
9022
9023/**
9024 * Common worker for FPU instructions working on STn and ST0, and storing the
9025 * result in STn unless IE, DE or ZE was raised.
9026 *
9027 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9028 */
9029FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9030{
9031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9032
9033 IEM_MC_BEGIN(3, 1);
9034 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9035 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9036 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9037 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9038
9039 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9040 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9041
9042 IEM_MC_PREPARE_FPU_USAGE();
9043 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
9044 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9045 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
9046 IEM_MC_ELSE()
9047 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
9048 IEM_MC_ENDIF();
9049 IEM_MC_ADVANCE_RIP();
9050
9051 IEM_MC_END();
9052 return VINF_SUCCESS;
9053}
9054
9055
/** Opcode 0xdc 11/0.
 * fadd stN,st0 - defers to the common ST(n)-by-ST0 worker with
 * iemAImpl_fadd_r80_by_r80. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
9062
9063
/** Opcode 0xdc 11/1.
 * fmul stN,st0 - defers to the common ST(n)-by-ST0 worker with
 * iemAImpl_fmul_r80_by_r80. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
9070
9071
/** Opcode 0xdc 11/4.
 * fsubr stN,st0 - defers to the common ST(n)-by-ST0 worker with
 * iemAImpl_fsubr_r80_by_r80. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
9078
9079
/** Opcode 0xdc 11/5.
 * fsub stN,st0 - defers to the common ST(n)-by-ST0 worker with
 * iemAImpl_fsub_r80_by_r80. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
9086
9087
/** Opcode 0xdc 11/6.
 * fdivr stN,st0 - defers to the common ST(n)-by-ST0 worker with
 * iemAImpl_fdivr_r80_by_r80. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
9094
9095
/** Opcode 0xdc 11/7.
 * fdiv stN,st0 - defers to the common ST(n)-by-ST0 worker with
 * iemAImpl_fdiv_r80_by_r80. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
9102
9103
9104/**
9105 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
9106 * memory operand, and storing the result in ST0.
9107 *
9108 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9109 */
9110FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
9111{
9112 IEM_MC_BEGIN(3, 3);
9113 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9114 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9115 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
9116 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9117 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
9118 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
9119
9120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9122 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9123 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9124
9125 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9126 IEM_MC_PREPARE_FPU_USAGE();
9127 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
9128 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
9129 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9130 IEM_MC_ELSE()
9131 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9132 IEM_MC_ENDIF();
9133 IEM_MC_ADVANCE_RIP();
9134
9135 IEM_MC_END();
9136 return VINF_SUCCESS;
9137}
9138
9139
/** Opcode 0xdc !11/0 - FADD m64r.
 * Adds a 64-bit real memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
9146
9147
/** Opcode 0xdc !11/1 - FMUL m64r.
 * Multiplies ST(0) by a 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
9154
9155
/** Opcode 0xdc !11/2 - FCOM ST0,m64r.
 * Compares ST(0) with a 64-bit real memory operand, updating the FSW condition
 * codes; flags stack underflow if ST(0) is empty. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9188
9189
/** Opcode 0xdc !11/3 - FCOMP ST0,m64r.
 * Same as FCOM m64r but pops the FPU stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9222
9223
/** Opcode 0xdc !11/4 - FSUB m64r.
 * Subtracts a 64-bit real memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
9230
9231
/** Opcode 0xdc !11/5 - FSUBR m64r.
 * Stores (m64r - ST(0)) in ST(0) (reverse subtract). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
9238
9239
/** Opcode 0xdc !11/6 - FDIV m64r.
 * Divides ST(0) by a 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
9246
9247
/** Opcode 0xdc !11/7 - FDIVR m64r.
 * Stores (m64r / ST(0)) in ST(0) (reverse divide). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
9254
9255
9256/**
9257 * @opcode 0xdc
9258 */
9259FNIEMOP_DEF(iemOp_EscF4)
9260{
9261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9262 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
9263 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9264 {
9265 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9266 {
9267 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
9268 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
9269 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
9270 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
9271 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
9272 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
9273 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
9274 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
9275 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9276 }
9277 }
9278 else
9279 {
9280 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9281 {
9282 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
9283 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
9284 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
9285 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
9286 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
9287 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
9288 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
9289 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
9290 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9291 }
9292 }
9293}
9294
9295
/** Opcode 0xdd !11/0 - FLD m64r.
 * Converts a 64-bit real memory operand to 80-bit and pushes it onto the
 * FPU stack; signals stack overflow if ST(7) is occupied.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9327
9328
/** Opcode 0xdd !11/1 - FISTTP m64i.
 * Stores ST(0) to a 64-bit integer memory operand using truncation, then pops.
 * If ST(0) is empty and IM is masked, the integer indefinite value is stored. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9363
9364
/** Opcode 0xdd !11/2 - FST m64r.
 * Stores ST(0) to a 64-bit real memory operand (no pop).
 * If ST(0) is empty and IM is masked, a negative QNaN is stored. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9399
9400
9401
9402
/** Opcode 0xdd !11/3 - FSTP m64r.
 * Same as FST m64r, but pops the FPU stack afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9437
9438
/** Opcode 0xdd !11/4 - FRSTOR m94/108byte.
 * Restores the entire FPU state from memory; deferred to a C implementation
 * since the image layout depends on the effective operand size. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9456
9457
/** Opcode 0xdd !11/6 - FNSAVE m94/108byte.
 * Saves the entire FPU state to memory (no pending-exception check);
 * deferred to a C implementation as the image layout is operand-size
 * dependent. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
9476
/** Opcode 0xdd !11/7 - FNSTSW m16.
 * Stores the FPU status word to a 16-bit memory operand (no pending-exception
 * check). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
9501
9502
/** Opcode 0xdd 11/0 - FFREE ST(i).
 * Marks ST(i) as empty in the FPU tag word; the TOP pointer is unchanged. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9524
9525
/** Opcode 0xdd 11/2 - FST ST(i).
 * Copies ST(0) into ST(i); flags stack underflow if ST(0) is empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9550
9551
/** Opcode 0xdd 11/4 - FUCOM ST0,ST(i).
 * Unordered compare of ST(0) with ST(i); no result stored, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
9558
9559
/** Opcode 0xdd 11/5 - FUCOMP ST0,ST(i).
 * Unordered compare of ST(0) with ST(i), then pops the stack. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9566
9567
9568/**
9569 * @opcode 0xdd
9570 */
9571FNIEMOP_DEF(iemOp_EscF5)
9572{
9573 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9574 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
9575 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9576 {
9577 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9578 {
9579 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
9580 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
9581 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
9582 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
9583 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
9584 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
9585 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9586 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9587 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9588 }
9589 }
9590 else
9591 {
9592 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9593 {
9594 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
9595 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
9596 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
9597 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
9598 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
9599 case 5: return IEMOP_RAISE_INVALID_OPCODE();
9600 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
9601 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
9602 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9603 }
9604 }
9605}
9606
9607
/** Opcode 0xde 11/0 - FADDP ST(i),ST(0).
 * Adds ST(0) to ST(i), stores the result in ST(i) and pops. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
9614
9615
/** Opcode 0xde 11/1 - FMULP ST(i),ST(0).
 * Multiplies ST(i) by ST(0), stores the result in ST(i) and pops. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
9622
9623
/** Opcode 0xde 0xd9 - FCOMPP.
 * Compares ST(0) with ST(1), then pops the stack twice. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
9630
9631
/** Opcode 0xde 11/4 - FSUBRP ST(i),ST(0).
 * Stores ST(0) - ST(i) in ST(i) and pops. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
9638
9639
/** Opcode 0xde 11/5 - FSUBP ST(i),ST(0).
 * Stores ST(i) - ST(0) in ST(i) and pops. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
9646
9647
/** Opcode 0xde 11/6 - FDIVRP ST(i),ST(0).
 * Stores ST(0) / ST(i) in ST(i) and pops. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
9654
9655
/** Opcode 0xde 11/7 - FDIVP ST(i),ST(0).
 * Stores ST(i) / ST(0) in ST(i) and pops. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9662
9663
9664/**
9665 * Common worker for FPU instructions working on ST0 and an m16i, and storing
9666 * the result in ST0.
9667 *
9668 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9669 */
9670FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
9671{
9672 IEM_MC_BEGIN(3, 3);
9673 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9674 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9675 IEM_MC_LOCAL(int16_t, i16Val2);
9676 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9677 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9678 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
9679
9680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9682
9683 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9684 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9685 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9686
9687 IEM_MC_PREPARE_FPU_USAGE();
9688 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9689 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
9690 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
9691 IEM_MC_ELSE()
9692 IEM_MC_FPU_STACK_UNDERFLOW(0);
9693 IEM_MC_ENDIF();
9694 IEM_MC_ADVANCE_RIP();
9695
9696 IEM_MC_END();
9697 return VINF_SUCCESS;
9698}
9699
9700
/** Opcode 0xde !11/0 - FIADD m16i.
 * Adds a 16-bit signed integer memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
9707
9708
/** Opcode 0xde !11/1 - FIMUL m16i.
 * Multiplies ST(0) by a 16-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9715
9716
/** Opcode 0xde !11/2 - FICOM ST0,m16i.
 * Compares ST(0) with a 16-bit signed integer memory operand, updating the
 * FSW condition codes; flags stack underflow if ST(0) is empty. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9749
9750
/** Opcode 0xde !11/3 - FICOMP ST0,m16i.
 * Same as FICOM m16i but pops the FPU stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9783
9784
/** Opcode 0xde !11/4 - FISUB m16i.
 * Subtracts a 16-bit signed integer memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
9791
9792
/** Opcode 0xde !11/5 - FISUBR m16i.
 * Stores (m16i - ST(0)) in ST(0) (reverse subtract). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
9799
9800
/** Opcode 0xde !11/6 - FIDIV m16i.
 * Divides ST(0) by a 16-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
9807
9808
/** Opcode 0xde !11/7 - FIDIVR m16i.
 * Stores (m16i / ST(0)) in ST(0) (reverse divide). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9815
9816
9817/**
9818 * @opcode 0xde
9819 */
9820FNIEMOP_DEF(iemOp_EscF6)
9821{
9822 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9823 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
9824 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9825 {
9826 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9827 {
9828 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
9829 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
9830 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9831 case 3: if (bRm == 0xd9)
9832 return FNIEMOP_CALL(iemOp_fcompp);
9833 return IEMOP_RAISE_INVALID_OPCODE();
9834 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
9835 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
9836 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
9837 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
9838 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9839 }
9840 }
9841 else
9842 {
9843 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9844 {
9845 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
9846 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
9847 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
9848 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
9849 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
9850 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
9851 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
9852 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
9853 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9854 }
9855 }
9856}
9857
9858
/** Opcode 0xdf 11/0 - FFREEP ST(i).
 * Undocumented instruction, assumed to work like FFREE + FINCSTP:
 * marks ST(i) as empty and then increments the TOP pointer. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9880
9881
/** Opcode 0xdf 0xe0 - FNSTSW AX.
 * Copies the FPU status word into AX (no pending-exception check). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9898
9899
/** Opcode 0xdf 11/5 - FUCOMIP ST0,ST(i).
 * Unordered compare of ST(0) with ST(i) setting EFLAGS, then pops.
 * NOTE(review): this passes the FCOMI worker (iemAImpl_fcomi_r80_by_r80),
 * same as FCOMIP below; FUCOMI differs from FCOMI only in whether a QNaN
 * operand raises \#IA — confirm the shared worker handles the unordered
 * case as intended for FUCOMIP. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9906
9907
/** Opcode 0xdf 11/6 - FCOMIP ST0,ST(i).
 * Compares ST(0) with ST(i) setting EFLAGS, then pops. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9914
9915
/** Opcode 0xdf !11/0 - FILD m16i.
 * Converts a 16-bit signed integer memory operand to 80-bit real and pushes
 * it onto the FPU stack; signals stack overflow if ST(7) is occupied. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9947
9948
/** Opcode 0xdf !11/1 - FISTTP m16i.
 * Stores ST(0) to a 16-bit integer memory operand using truncation, then pops.
 * If ST(0) is empty and IM is masked, the integer indefinite value is stored. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9983
9984
/** Opcode 0xdf !11/2 - FIST m16i.
 * Stores ST(0) to a 16-bit integer memory operand using the current rounding
 * mode (no pop).  If ST(0) is empty and IM is masked, the integer indefinite
 * value is stored. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10019
10020
/** Opcode 0xdf !11/3 - FISTP m16i.
 * Same as FIST m16i, but pops the FPU stack afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10055
10056
/** Opcode 0xdf !11/4.  FBLD m80bcd - not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
10059
10060
/** Opcode 0xdf !11/5.
 *  FILD m64i - load a signed 64-bit integer onto the FPU stack.
 *  @param bRm  The ModR/M byte (memory operand form). */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the source before modifying any FPU state so a #PF can restart. */
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        /* ST(7) free: convert and push the result. */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* Pushing onto a full stack: signal stack overflow. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10092
10093
/** Opcode 0xdf !11/6.  FBSTP m80bcd - not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
10096
10097
/** Opcode 0xdf !11/7.
 *  FISTP m64i - store ST(0) as a signed 64-bit integer and pop the stack.
 *  @param bRm  The ModR/M byte (memory operand form). */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable before touching FPU state. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* ST(0) valid: convert, commit, update FSW and pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10132
10133
10134/**
10135 * @opcode 0xdf
10136 */
10137FNIEMOP_DEF(iemOp_EscF7)
10138{
10139 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10140 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10141 {
10142 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10143 {
10144 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
10145 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
10146 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10147 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10148 case 4: if (bRm == 0xe0)
10149 return FNIEMOP_CALL(iemOp_fnstsw_ax);
10150 return IEMOP_RAISE_INVALID_OPCODE();
10151 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
10152 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
10153 case 7: return IEMOP_RAISE_INVALID_OPCODE();
10154 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10155 }
10156 }
10157 else
10158 {
10159 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10160 {
10161 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
10162 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
10163 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
10164 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
10165 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
10166 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
10167 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
10168 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
10169 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10170 }
10171 }
10172}
10173
10174
10175/**
10176 * @opcode 0xe0
10177 */
10178FNIEMOP_DEF(iemOp_loopne_Jb)
10179{
10180 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
10181 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10183 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10184
10185 switch (pVCpu->iem.s.enmEffAddrMode)
10186 {
10187 case IEMMODE_16BIT:
10188 IEM_MC_BEGIN(0,0);
10189 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10190 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10191 IEM_MC_REL_JMP_S8(i8Imm);
10192 } IEM_MC_ELSE() {
10193 IEM_MC_ADVANCE_RIP();
10194 } IEM_MC_ENDIF();
10195 IEM_MC_END();
10196 return VINF_SUCCESS;
10197
10198 case IEMMODE_32BIT:
10199 IEM_MC_BEGIN(0,0);
10200 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10201 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10202 IEM_MC_REL_JMP_S8(i8Imm);
10203 } IEM_MC_ELSE() {
10204 IEM_MC_ADVANCE_RIP();
10205 } IEM_MC_ENDIF();
10206 IEM_MC_END();
10207 return VINF_SUCCESS;
10208
10209 case IEMMODE_64BIT:
10210 IEM_MC_BEGIN(0,0);
10211 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10212 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10213 IEM_MC_REL_JMP_S8(i8Imm);
10214 } IEM_MC_ELSE() {
10215 IEM_MC_ADVANCE_RIP();
10216 } IEM_MC_ENDIF();
10217 IEM_MC_END();
10218 return VINF_SUCCESS;
10219
10220 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10221 }
10222}
10223
10224
10225/**
10226 * @opcode 0xe1
10227 */
10228FNIEMOP_DEF(iemOp_loope_Jb)
10229{
10230 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
10231 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10233 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10234
10235 switch (pVCpu->iem.s.enmEffAddrMode)
10236 {
10237 case IEMMODE_16BIT:
10238 IEM_MC_BEGIN(0,0);
10239 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10240 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10241 IEM_MC_REL_JMP_S8(i8Imm);
10242 } IEM_MC_ELSE() {
10243 IEM_MC_ADVANCE_RIP();
10244 } IEM_MC_ENDIF();
10245 IEM_MC_END();
10246 return VINF_SUCCESS;
10247
10248 case IEMMODE_32BIT:
10249 IEM_MC_BEGIN(0,0);
10250 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10251 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10252 IEM_MC_REL_JMP_S8(i8Imm);
10253 } IEM_MC_ELSE() {
10254 IEM_MC_ADVANCE_RIP();
10255 } IEM_MC_ENDIF();
10256 IEM_MC_END();
10257 return VINF_SUCCESS;
10258
10259 case IEMMODE_64BIT:
10260 IEM_MC_BEGIN(0,0);
10261 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10262 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10263 IEM_MC_REL_JMP_S8(i8Imm);
10264 } IEM_MC_ELSE() {
10265 IEM_MC_ADVANCE_RIP();
10266 } IEM_MC_ENDIF();
10267 IEM_MC_END();
10268 return VINF_SUCCESS;
10269
10270 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10271 }
10272}
10273
10274
10275/**
10276 * @opcode 0xe2
10277 */
10278FNIEMOP_DEF(iemOp_loop_Jb)
10279{
10280 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
10281 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10283 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10284
10285 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
10286 * using the 32-bit operand size override. How can that be restarted? See
10287 * weird pseudo code in intel manual. */
10288
10289 /** NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
10290 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
10291 * the loop causes guest crashes, but when logging it's nice to skip a few million
10292 * lines of useless output. */
10293#if defined(LOG_ENABLED)
10294 if ((LogIs3Enabled() || LogIs4Enabled()) && (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm))
10295 switch (pVCpu->iem.s.enmEffAddrMode)
10296 {
10297 case IEMMODE_16BIT:
10298 IEM_MC_BEGIN(0,0);
10299 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
10300 IEM_MC_ADVANCE_RIP();
10301 IEM_MC_END();
10302 return VINF_SUCCESS;
10303
10304 case IEMMODE_32BIT:
10305 IEM_MC_BEGIN(0,0);
10306 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
10307 IEM_MC_ADVANCE_RIP();
10308 IEM_MC_END();
10309 return VINF_SUCCESS;
10310
10311 case IEMMODE_64BIT:
10312 IEM_MC_BEGIN(0,0);
10313 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
10314 IEM_MC_ADVANCE_RIP();
10315 IEM_MC_END();
10316 return VINF_SUCCESS;
10317
10318 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10319 }
10320#endif
10321
10322 switch (pVCpu->iem.s.enmEffAddrMode)
10323 {
10324 case IEMMODE_16BIT:
10325 IEM_MC_BEGIN(0,0);
10326
10327 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10328 IEM_MC_IF_CX_IS_NZ() {
10329 IEM_MC_REL_JMP_S8(i8Imm);
10330 } IEM_MC_ELSE() {
10331 IEM_MC_ADVANCE_RIP();
10332 } IEM_MC_ENDIF();
10333 IEM_MC_END();
10334 return VINF_SUCCESS;
10335
10336 case IEMMODE_32BIT:
10337 IEM_MC_BEGIN(0,0);
10338 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10339 IEM_MC_IF_ECX_IS_NZ() {
10340 IEM_MC_REL_JMP_S8(i8Imm);
10341 } IEM_MC_ELSE() {
10342 IEM_MC_ADVANCE_RIP();
10343 } IEM_MC_ENDIF();
10344 IEM_MC_END();
10345 return VINF_SUCCESS;
10346
10347 case IEMMODE_64BIT:
10348 IEM_MC_BEGIN(0,0);
10349 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10350 IEM_MC_IF_RCX_IS_NZ() {
10351 IEM_MC_REL_JMP_S8(i8Imm);
10352 } IEM_MC_ELSE() {
10353 IEM_MC_ADVANCE_RIP();
10354 } IEM_MC_ENDIF();
10355 IEM_MC_END();
10356 return VINF_SUCCESS;
10357
10358 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10359 }
10360}
10361
10362
10363/**
10364 * @opcode 0xe3
10365 */
10366FNIEMOP_DEF(iemOp_jecxz_Jb)
10367{
10368 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
10369 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10371 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10372
10373 switch (pVCpu->iem.s.enmEffAddrMode)
10374 {
10375 case IEMMODE_16BIT:
10376 IEM_MC_BEGIN(0,0);
10377 IEM_MC_IF_CX_IS_NZ() {
10378 IEM_MC_ADVANCE_RIP();
10379 } IEM_MC_ELSE() {
10380 IEM_MC_REL_JMP_S8(i8Imm);
10381 } IEM_MC_ENDIF();
10382 IEM_MC_END();
10383 return VINF_SUCCESS;
10384
10385 case IEMMODE_32BIT:
10386 IEM_MC_BEGIN(0,0);
10387 IEM_MC_IF_ECX_IS_NZ() {
10388 IEM_MC_ADVANCE_RIP();
10389 } IEM_MC_ELSE() {
10390 IEM_MC_REL_JMP_S8(i8Imm);
10391 } IEM_MC_ENDIF();
10392 IEM_MC_END();
10393 return VINF_SUCCESS;
10394
10395 case IEMMODE_64BIT:
10396 IEM_MC_BEGIN(0,0);
10397 IEM_MC_IF_RCX_IS_NZ() {
10398 IEM_MC_ADVANCE_RIP();
10399 } IEM_MC_ELSE() {
10400 IEM_MC_REL_JMP_S8(i8Imm);
10401 } IEM_MC_ENDIF();
10402 IEM_MC_END();
10403 return VINF_SUCCESS;
10404
10405 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10406 }
10407}
10408
10409
/** Opcode 0xe4.  IN AL,Ib - read one byte from the immediate port into AL;
 *  deferred to the C implementation (privilege/VM-exit checks done there). */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, 1);
}
10418
10419
/** Opcode 0xe5.  IN eAX,Ib - read 2 or 4 bytes (per operand size) from the
 *  immediate port into AX/EAX; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10428
10429
/** Opcode 0xe6.  OUT Ib,AL - write AL to the immediate port; deferred to
 *  the C implementation. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, 1);
}
10438
10439
/** Opcode 0xe7.  OUT Ib,eAX - write AX/EAX (per operand size) to the
 *  immediate port; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10448
10449
10450/**
10451 * @opcode 0xe8
10452 */
10453FNIEMOP_DEF(iemOp_call_Jv)
10454{
10455 IEMOP_MNEMONIC(call_Jv, "call Jv");
10456 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10457 switch (pVCpu->iem.s.enmEffOpSize)
10458 {
10459 case IEMMODE_16BIT:
10460 {
10461 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10462 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
10463 }
10464
10465 case IEMMODE_32BIT:
10466 {
10467 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10468 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
10469 }
10470
10471 case IEMMODE_64BIT:
10472 {
10473 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10474 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
10475 }
10476
10477 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10478 }
10479}
10480
10481
10482/**
10483 * @opcode 0xe9
10484 */
10485FNIEMOP_DEF(iemOp_jmp_Jv)
10486{
10487 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
10488 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10489 switch (pVCpu->iem.s.enmEffOpSize)
10490 {
10491 case IEMMODE_16BIT:
10492 {
10493 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
10494 IEM_MC_BEGIN(0, 0);
10495 IEM_MC_REL_JMP_S16(i16Imm);
10496 IEM_MC_END();
10497 return VINF_SUCCESS;
10498 }
10499
10500 case IEMMODE_64BIT:
10501 case IEMMODE_32BIT:
10502 {
10503 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
10504 IEM_MC_BEGIN(0, 0);
10505 IEM_MC_REL_JMP_S32(i32Imm);
10506 IEM_MC_END();
10507 return VINF_SUCCESS;
10508 }
10509
10510 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10511 }
10512}
10513
10514
10515/**
10516 * @opcode 0xea
10517 */
10518FNIEMOP_DEF(iemOp_jmp_Ap)
10519{
10520 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
10521 IEMOP_HLP_NO_64BIT();
10522
10523 /* Decode the far pointer address and pass it on to the far call C implementation. */
10524 uint32_t offSeg;
10525 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
10526 IEM_OPCODE_GET_NEXT_U32(&offSeg);
10527 else
10528 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
10529 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
10530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10531 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
10532}
10533
10534
10535/**
10536 * @opcode 0xeb
10537 */
10538FNIEMOP_DEF(iemOp_jmp_Jb)
10539{
10540 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
10541 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10543 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10544
10545 IEM_MC_BEGIN(0, 0);
10546 IEM_MC_REL_JMP_S8(i8Imm);
10547 IEM_MC_END();
10548 return VINF_SUCCESS;
10549}
10550
10551
/** Opcode 0xec.  IN AL,DX - read one byte from port DX into AL; deferred
 *  to the C implementation. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
10559
10560
/** Opcode 0xed.  IN eAX,DX - read 2 or 4 bytes (per operand size) from
 *  port DX into AX/EAX; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10568
10569
/** Opcode 0xee.  OUT DX,AL - write AL to port DX; deferred to the C
 *  implementation. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
10577
10578
/** Opcode 0xef.  OUT DX,eAX - write AX/EAX (per operand size) to port DX;
 *  deferred to the C implementation. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10586
10587
10588/**
10589 * @opcode 0xf0
10590 */
10591FNIEMOP_DEF(iemOp_lock)
10592{
10593 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
10594 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
10595
10596 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10597 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10598}
10599
10600
10601/**
10602 * @opcode 0xf1
10603 */
10604FNIEMOP_DEF(iemOp_int1)
10605{
10606 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
10607 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
10608 /** @todo testcase! */
10609 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
10610}
10611
10612
10613/**
10614 * @opcode 0xf2
10615 */
10616FNIEMOP_DEF(iemOp_repne)
10617{
10618 /* This overrides any previous REPE prefix. */
10619 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
10620 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
10621 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
10622
10623 /* For the 4 entry opcode tables, REPNZ overrides any previous
10624 REPZ and operand size prefixes. */
10625 pVCpu->iem.s.idxPrefix = 3;
10626
10627 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10628 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10629}
10630
10631
10632/**
10633 * @opcode 0xf3
10634 */
10635FNIEMOP_DEF(iemOp_repe)
10636{
10637 /* This overrides any previous REPNE prefix. */
10638 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
10639 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
10640 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
10641
10642 /* For the 4 entry opcode tables, REPNZ overrides any previous
10643 REPNZ and operand size prefixes. */
10644 pVCpu->iem.s.idxPrefix = 2;
10645
10646 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10647 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10648}
10649
10650
10651/**
10652 * @opcode 0xf4
10653 */
10654FNIEMOP_DEF(iemOp_hlt)
10655{
10656 IEMOP_MNEMONIC(hlt, "hlt");
10657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10658 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
10659}
10660
10661
10662/**
10663 * @opcode 0xf5
10664 */
10665FNIEMOP_DEF(iemOp_cmc)
10666{
10667 IEMOP_MNEMONIC(cmc, "cmc");
10668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10669 IEM_MC_BEGIN(0, 0);
10670 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
10671 IEM_MC_ADVANCE_RIP();
10672 IEM_MC_END();
10673 return VINF_SUCCESS;
10674}
10675
10676
10677/**
10678 * Common implementation of 'inc/dec/not/neg Eb'.
10679 *
10680 * @param bRm The RM byte.
10681 * @param pImpl The instruction implementation.
10682 */
10683FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10684{
10685 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10686 {
10687 /* register access */
10688 IEM_MC_BEGIN(2, 0);
10689 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10690 IEM_MC_ARG(uint32_t *, pEFlags, 1);
10691 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10692 IEM_MC_REF_EFLAGS(pEFlags);
10693 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10694 IEM_MC_ADVANCE_RIP();
10695 IEM_MC_END();
10696 }
10697 else
10698 {
10699 /* memory access. */
10700 IEM_MC_BEGIN(2, 2);
10701 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10702 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10704
10705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10706 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10707 IEM_MC_FETCH_EFLAGS(EFlags);
10708 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10709 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10710 else
10711 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
10712
10713 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10714 IEM_MC_COMMIT_EFLAGS(EFlags);
10715 IEM_MC_ADVANCE_RIP();
10716 IEM_MC_END();
10717 }
10718 return VINF_SUCCESS;
10719}
10720
10721
10722/**
10723 * Common implementation of 'inc/dec/not/neg Ev'.
10724 *
10725 * @param bRm The RM byte.
10726 * @param pImpl The instruction implementation.
10727 */
10728FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10729{
10730 /* Registers are handled by a common worker. */
10731 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10732 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10733
10734 /* Memory we do here. */
10735 switch (pVCpu->iem.s.enmEffOpSize)
10736 {
10737 case IEMMODE_16BIT:
10738 IEM_MC_BEGIN(2, 2);
10739 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10740 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10742
10743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10744 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10745 IEM_MC_FETCH_EFLAGS(EFlags);
10746 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10747 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
10748 else
10749 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
10750
10751 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10752 IEM_MC_COMMIT_EFLAGS(EFlags);
10753 IEM_MC_ADVANCE_RIP();
10754 IEM_MC_END();
10755 return VINF_SUCCESS;
10756
10757 case IEMMODE_32BIT:
10758 IEM_MC_BEGIN(2, 2);
10759 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10760 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10761 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10762
10763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10764 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10765 IEM_MC_FETCH_EFLAGS(EFlags);
10766 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10767 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
10768 else
10769 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
10770
10771 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10772 IEM_MC_COMMIT_EFLAGS(EFlags);
10773 IEM_MC_ADVANCE_RIP();
10774 IEM_MC_END();
10775 return VINF_SUCCESS;
10776
10777 case IEMMODE_64BIT:
10778 IEM_MC_BEGIN(2, 2);
10779 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10780 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10781 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10782
10783 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10784 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10785 IEM_MC_FETCH_EFLAGS(EFlags);
10786 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10787 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
10788 else
10789 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
10790
10791 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10792 IEM_MC_COMMIT_EFLAGS(EFlags);
10793 IEM_MC_ADVANCE_RIP();
10794 IEM_MC_END();
10795 return VINF_SUCCESS;
10796
10797 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10798 }
10799}
10800
10801
/** Opcode 0xf6 /0.
 *  TEST Eb,Ib - AND without storing the result, only flags are updated.
 *  @param bRm  The ModR/M byte. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm = 1: one immediate byte follows the ModR/M encoding. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Read-only mapping: TEST never writes the destination. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10848
10849
/** Opcode 0xf7 /0.
 *  TEST Ev,Iv - AND without storing the result, only flags are updated.
 *  In 64-bit mode the immediate is 32 bits, sign-extended to 64.
 *  @param bRm  The ModR/M byte. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm = 2: a word immediate follows the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* Read-only mapping: TEST never writes the destination. */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm = 4: a dword immediate follows the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm = 4: the 64-bit form still has a dword immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10989
10990
/** Opcode 0xf6 /4, /5, /6 and /7.
 *  Common worker for MUL/IMUL/DIV/IDIV with a byte operand; AX is the
 *  implicit destination.  A non-zero worker return code raises \#DE.
 *  @param bRm    The ModR/M byte.
 *  @param pfnU8  The 8-bit operand-size assembly worker. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 means divide error (divide by zero / result overflow). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 means divide error (divide by zero / result overflow). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11042
11043
11044/** Opcode 0xf7 /4, /5, /6 and /7. */
11045FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
11046{
11047 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11048
11049 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11050 {
11051 /* register access */
11052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11053 switch (pVCpu->iem.s.enmEffOpSize)
11054 {
11055 case IEMMODE_16BIT:
11056 {
11057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11058 IEM_MC_BEGIN(4, 1);
11059 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11060 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11061 IEM_MC_ARG(uint16_t, u16Value, 2);
11062 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11063 IEM_MC_LOCAL(int32_t, rc);
11064
11065 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11066 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11067 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11068 IEM_MC_REF_EFLAGS(pEFlags);
11069 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11070 IEM_MC_IF_LOCAL_IS_Z(rc) {
11071 IEM_MC_ADVANCE_RIP();
11072 } IEM_MC_ELSE() {
11073 IEM_MC_RAISE_DIVIDE_ERROR();
11074 } IEM_MC_ENDIF();
11075
11076 IEM_MC_END();
11077 return VINF_SUCCESS;
11078 }
11079
11080 case IEMMODE_32BIT:
11081 {
11082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11083 IEM_MC_BEGIN(4, 1);
11084 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11085 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11086 IEM_MC_ARG(uint32_t, u32Value, 2);
11087 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11088 IEM_MC_LOCAL(int32_t, rc);
11089
11090 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11091 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11092 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11093 IEM_MC_REF_EFLAGS(pEFlags);
11094 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11095 IEM_MC_IF_LOCAL_IS_Z(rc) {
11096 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11097 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11098 IEM_MC_ADVANCE_RIP();
11099 } IEM_MC_ELSE() {
11100 IEM_MC_RAISE_DIVIDE_ERROR();
11101 } IEM_MC_ENDIF();
11102
11103 IEM_MC_END();
11104 return VINF_SUCCESS;
11105 }
11106
11107 case IEMMODE_64BIT:
11108 {
11109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11110 IEM_MC_BEGIN(4, 1);
11111 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11112 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11113 IEM_MC_ARG(uint64_t, u64Value, 2);
11114 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11115 IEM_MC_LOCAL(int32_t, rc);
11116
11117 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11118 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11119 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11120 IEM_MC_REF_EFLAGS(pEFlags);
11121 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11122 IEM_MC_IF_LOCAL_IS_Z(rc) {
11123 IEM_MC_ADVANCE_RIP();
11124 } IEM_MC_ELSE() {
11125 IEM_MC_RAISE_DIVIDE_ERROR();
11126 } IEM_MC_ENDIF();
11127
11128 IEM_MC_END();
11129 return VINF_SUCCESS;
11130 }
11131
11132 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11133 }
11134 }
11135 else
11136 {
11137 /* memory access. */
11138 switch (pVCpu->iem.s.enmEffOpSize)
11139 {
11140 case IEMMODE_16BIT:
11141 {
11142 IEM_MC_BEGIN(4, 2);
11143 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11144 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11145 IEM_MC_ARG(uint16_t, u16Value, 2);
11146 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11147 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11148 IEM_MC_LOCAL(int32_t, rc);
11149
11150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11152 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11153 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11154 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11155 IEM_MC_REF_EFLAGS(pEFlags);
11156 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11157 IEM_MC_IF_LOCAL_IS_Z(rc) {
11158 IEM_MC_ADVANCE_RIP();
11159 } IEM_MC_ELSE() {
11160 IEM_MC_RAISE_DIVIDE_ERROR();
11161 } IEM_MC_ENDIF();
11162
11163 IEM_MC_END();
11164 return VINF_SUCCESS;
11165 }
11166
11167 case IEMMODE_32BIT:
11168 {
11169 IEM_MC_BEGIN(4, 2);
11170 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11171 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11172 IEM_MC_ARG(uint32_t, u32Value, 2);
11173 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11175 IEM_MC_LOCAL(int32_t, rc);
11176
11177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11179 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11180 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11181 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11182 IEM_MC_REF_EFLAGS(pEFlags);
11183 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11184 IEM_MC_IF_LOCAL_IS_Z(rc) {
11185 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11186 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11187 IEM_MC_ADVANCE_RIP();
11188 } IEM_MC_ELSE() {
11189 IEM_MC_RAISE_DIVIDE_ERROR();
11190 } IEM_MC_ENDIF();
11191
11192 IEM_MC_END();
11193 return VINF_SUCCESS;
11194 }
11195
11196 case IEMMODE_64BIT:
11197 {
11198 IEM_MC_BEGIN(4, 2);
11199 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11200 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11201 IEM_MC_ARG(uint64_t, u64Value, 2);
11202 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11204 IEM_MC_LOCAL(int32_t, rc);
11205
11206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11208 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11209 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11210 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11211 IEM_MC_REF_EFLAGS(pEFlags);
11212 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11213 IEM_MC_IF_LOCAL_IS_Z(rc) {
11214 IEM_MC_ADVANCE_RIP();
11215 } IEM_MC_ELSE() {
11216 IEM_MC_RAISE_DIVIDE_ERROR();
11217 } IEM_MC_ENDIF();
11218
11219 IEM_MC_END();
11220 return VINF_SUCCESS;
11221 }
11222
11223 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11224 }
11225 }
11226}
11227
/**
 * @opcode 0xf6
 *
 * Group 3 with a byte-sized r/m operand: TEST/NOT/NEG/MUL/IMUL/DIV/IDIV Eb,
 * dispatched on the ModR/M reg field.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            /* DIV/IDIV additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11266
11267
/**
 * @opcode 0xf7
 *
 * Group 3 with a word/dword/qword r/m operand: TEST/NOT/NEG/MUL/IMUL/DIV/IDIV
 * Ev, dispatched on the ModR/M reg field.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            /* DIV/IDIV additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11306
11307
/**
 * @opcode 0xf8
 *
 * CLC - clear the carry flag; no other flags affected.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11321
11322
/**
 * @opcode 0xf9
 *
 * STC - set the carry flag; no other flags affected.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11336
11337
/**
 * @opcode 0xfa
 *
 * CLI - clear the interrupt flag.  Deferred to a C implementation because of
 * the IOPL/VME/PVI privilege checking it requires.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
11347
11348
/**
 * @opcode 0xfb
 *
 * STI - set the interrupt flag.  Deferred to a C implementation because of
 * privilege checking and the one-instruction interrupt shadow it sets up.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11355
11356
/**
 * @opcode 0xfc
 *
 * CLD - clear the direction flag (string ops count upwards).
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11370
11371
/**
 * @opcode 0xfd
 *
 * STD - set the direction flag (string ops count downwards).
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11385
11386
/**
 * @opcode 0xfe
 *
 * Group 4: INC/DEC Eb on reg fields 0 and 1; everything else is \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            /* Reg fields 2 thru 7 are undefined for 0xfe. */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
11406
11407
/**
 * Opcode 0xff /2 - CALL Ev, near indirect call through register or memory.
 *
 * Pushes the return address and branches to the value of the r/m operand.
 * The operand size defaults to 64-bit in long mode.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11492
11493typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11494
11495FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11496{
11497 /* Registers? How?? */
11498 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
11499 { /* likely */ }
11500 else
11501 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11502
11503 /* Far pointer loaded from memory. */
11504 switch (pVCpu->iem.s.enmEffOpSize)
11505 {
11506 case IEMMODE_16BIT:
11507 IEM_MC_BEGIN(3, 1);
11508 IEM_MC_ARG(uint16_t, u16Sel, 0);
11509 IEM_MC_ARG(uint16_t, offSeg, 1);
11510 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11514 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11515 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11516 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11517 IEM_MC_END();
11518 return VINF_SUCCESS;
11519
11520 case IEMMODE_64BIT:
11521 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
11522 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11523 * and call far qword [rsp] encodings. */
11524 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11525 {
11526 IEM_MC_BEGIN(3, 1);
11527 IEM_MC_ARG(uint16_t, u16Sel, 0);
11528 IEM_MC_ARG(uint64_t, offSeg, 1);
11529 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11533 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11534 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11535 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11536 IEM_MC_END();
11537 return VINF_SUCCESS;
11538 }
11539 /* AMD falls thru. */
11540 RT_FALL_THRU();
11541
11542 case IEMMODE_32BIT:
11543 IEM_MC_BEGIN(3, 1);
11544 IEM_MC_ARG(uint16_t, u16Sel, 0);
11545 IEM_MC_ARG(uint32_t, offSeg, 1);
11546 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11550 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11551 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11552 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11553 IEM_MC_END();
11554 return VINF_SUCCESS;
11555
11556 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11557 }
11558}
11559
11560
/**
 * Opcode 0xff /3 - CALLF Ep, far indirect call through a memory far pointer.
 *
 * @param   bRm     The RM byte (must select a memory operand).
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    /* Shares the far-pointer loading with jmpf; only the C worker differs. */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
11570
11571
/**
 * Opcode 0xff /4 - JMP Ev, near indirect jump through register or memory.
 *
 * The operand size defaults to 64-bit in long mode.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11656
11657
/**
 * Opcode 0xff /5 - JMPF Ep, far indirect jump through a memory far pointer.
 *
 * @param   bRm     The RM byte (must select a memory operand).
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    /* Shares the far-pointer loading with callf; only the C worker differs. */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
11667
11668
/**
 * Opcode 0xff /6 - PUSH Ev.
 *
 * Register operands go through the common push-GReg worker; memory operands
 * are loaded and pushed here.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t,  u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t,  u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t,  u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11724
11725
/**
 * @opcode 0xff
 *
 * Group 5: INC/DEC/CALL/CALLF/JMP/JMPF/PUSH Ev, dispatched on the ModR/M reg
 * field; reg field 7 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* The 3-bit reg field cannot exceed 7; reaching this is an internal error. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
11756
11757
11758
/**
 * The one-byte opcode decoder dispatch table, indexed by the opcode byte.
 * Declared extern at the top of this file so the main decoder can reach it.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
11826
11827
11828/** @} */
11829
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette