VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 74490

Last change on this file since 74490 was 74490, checked in by vboxsync, 6 years ago

IEM: Added a few missing mnemonics, fixed typos.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 392.8 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 74490 2018-09-27 09:54:26Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2017 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24/* Instruction group definitions: */
25
26/** @defgroup og_gen General
27 * @{ */
28 /** @defgroup og_gen_arith Arithmetic
29 * @{ */
30 /** @defgroup og_gen_arith_bin Binary numbers */
31 /** @defgroup og_gen_arith_dec Decimal numbers */
32 /** @} */
33/** @} */
34
35/** @defgroup og_stack Stack
36 * @{ */
37 /** @defgroup og_stack_sreg Segment registers */
38/** @} */
39
40/** @defgroup og_prefix Prefixes */
41/** @defgroup og_escapes Escape bytes */
42
43
44
45/** @name One byte opcodes.
46 * @{
47 */
48
49/* Instruction specification format - work in progress: */
50
51/**
52 * @opcode 0x00
53 * @opmnemonic add
54 * @op1 rm:Eb
55 * @op2 reg:Gb
56 * @opmaps one
57 * @openc ModR/M
58 * @opflmodify cf,pf,af,zf,sf,of
59 * @ophints harmless ignores_op_sizes
60 * @opstats add_Eb_Gb
61 * @opgroup og_gen_arith_bin
62 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
63 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
64 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
65 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
66 */
67FNIEMOP_DEF(iemOp_add_Eb_Gb)
68{
69 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
70 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
71}
72
73
74/**
75 * @opcode 0x01
76 * @opgroup og_gen_arith_bin
77 * @opflmodify cf,pf,af,zf,sf,of
78 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
79 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
80 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
81 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
82 */
83FNIEMOP_DEF(iemOp_add_Ev_Gv)
84{
85 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
86 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
87}
88
89
90/**
91 * @opcode 0x02
92 * @opgroup og_gen_arith_bin
93 * @opflmodify cf,pf,af,zf,sf,of
94 * @opcopytests iemOp_add_Eb_Gb
95 */
96FNIEMOP_DEF(iemOp_add_Gb_Eb)
97{
98 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
99 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
100}
101
102
103/**
104 * @opcode 0x03
105 * @opgroup og_gen_arith_bin
106 * @opflmodify cf,pf,af,zf,sf,of
107 * @opcopytests iemOp_add_Ev_Gv
108 */
109FNIEMOP_DEF(iemOp_add_Gv_Ev)
110{
111 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
112 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
113}
114
115
116/**
117 * @opcode 0x04
118 * @opgroup og_gen_arith_bin
119 * @opflmodify cf,pf,af,zf,sf,of
120 * @opcopytests iemOp_add_Eb_Gb
121 */
122FNIEMOP_DEF(iemOp_add_Al_Ib)
123{
124 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
125 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
126}
127
128
129/**
130 * @opcode 0x05
131 * @opgroup og_gen_arith_bin
132 * @opflmodify cf,pf,af,zf,sf,of
133 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
134 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
135 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
136 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
137 */
138FNIEMOP_DEF(iemOp_add_eAX_Iz)
139{
140 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
141 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
142}
143
144
145/**
146 * @opcode 0x06
147 * @opgroup og_stack_sreg
148 */
149FNIEMOP_DEF(iemOp_push_ES)
150{
151 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
152 IEMOP_HLP_NO_64BIT();
153 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
154}
155
156
157/**
158 * @opcode 0x07
159 * @opgroup og_stack_sreg
160 */
161FNIEMOP_DEF(iemOp_pop_ES)
162{
163 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
164 IEMOP_HLP_NO_64BIT();
165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
166 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
167}
168
169
170/**
171 * @opcode 0x08
172 * @opgroup og_gen_arith_bin
173 * @opflmodify cf,pf,af,zf,sf,of
174 * @opflundef af
175 * @opflclear of,cf
176 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
177 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
178 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
179 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
180 */
181FNIEMOP_DEF(iemOp_or_Eb_Gb)
182{
183 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
184 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
185 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
186}
187
188
189/**
190 * @opcode 0x09
191 * @opgroup og_gen_arith_bin
192 * @opflmodify cf,pf,af,zf,sf,of
193 * @opflundef af
194 * @opflclear of,cf
195 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
196 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
197 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
198 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
199 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
200 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
201 */
202FNIEMOP_DEF(iemOp_or_Ev_Gv)
203{
204 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
205 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
206 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
207}
208
209
210/**
211 * @opcode 0x0a
212 * @opgroup og_gen_arith_bin
213 * @opflmodify cf,pf,af,zf,sf,of
214 * @opflundef af
215 * @opflclear of,cf
216 * @opcopytests iemOp_or_Eb_Gb
217 */
218FNIEMOP_DEF(iemOp_or_Gb_Eb)
219{
220 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
221 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
222 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
223}
224
225
226/**
227 * @opcode 0x0b
228 * @opgroup og_gen_arith_bin
229 * @opflmodify cf,pf,af,zf,sf,of
230 * @opflundef af
231 * @opflclear of,cf
232 * @opcopytests iemOp_or_Ev_Gv
233 */
234FNIEMOP_DEF(iemOp_or_Gv_Ev)
235{
236 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
237 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
238 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
239}
240
241
242/**
243 * @opcode 0x0c
244 * @opgroup og_gen_arith_bin
245 * @opflmodify cf,pf,af,zf,sf,of
246 * @opflundef af
247 * @opflclear of,cf
248 * @opcopytests iemOp_or_Eb_Gb
249 */
250FNIEMOP_DEF(iemOp_or_Al_Ib)
251{
252 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
253 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
254 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
255}
256
257
258/**
259 * @opcode 0x0d
260 * @opgroup og_gen_arith_bin
261 * @opflmodify cf,pf,af,zf,sf,of
262 * @opflundef af
263 * @opflclear of,cf
264 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
265 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
266 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
267 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
268 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
269 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
270 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
271 */
272FNIEMOP_DEF(iemOp_or_eAX_Iz)
273{
274 IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
275 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
276 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
277}
278
279
280/**
281 * @opcode 0x0e
282 * @opgroup og_stack_sreg
283 */
284FNIEMOP_DEF(iemOp_push_CS)
285{
286 IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
287 IEMOP_HLP_NO_64BIT();
288 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
289}
290
291
292/**
293 * @opcode 0x0f
294 * @opmnemonic EscTwo0f
295 * @openc two0f
296 * @opdisenum OP_2B_ESC
297 * @ophints harmless
298 * @opgroup og_escapes
299 */
300FNIEMOP_DEF(iemOp_2byteEscape)
301{
302#ifdef VBOX_STRICT
303 /* Sanity check the table the first time around. */
304 static bool s_fTested = false;
305 if (RT_LIKELY(s_fTested)) { /* likely */ }
306 else
307 {
308 s_fTested = true;
309 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
310 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
311 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
312 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
313 }
314#endif
315
316 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
317 {
318 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
319 IEMOP_HLP_MIN_286();
320 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
321 }
322 /* @opdone */
323
324 /*
325 * On the 8086 this is a POP CS instruction.
326 * For the time being we don't specify this this.
327 */
328 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
329 IEMOP_HLP_NO_64BIT();
330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
331 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
332}
333
334/**
335 * @opcode 0x10
336 * @opgroup og_gen_arith_bin
337 * @opfltest cf
338 * @opflmodify cf,pf,af,zf,sf,of
339 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
340 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
341 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
342 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
343 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
344 */
345FNIEMOP_DEF(iemOp_adc_Eb_Gb)
346{
347 IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
348 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
349}
350
351
352/**
353 * @opcode 0x11
354 * @opgroup og_gen_arith_bin
355 * @opfltest cf
356 * @opflmodify cf,pf,af,zf,sf,of
357 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
358 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
359 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
360 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
361 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
362 */
363FNIEMOP_DEF(iemOp_adc_Ev_Gv)
364{
365 IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
366 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
367}
368
369
370/**
371 * @opcode 0x12
372 * @opgroup og_gen_arith_bin
373 * @opfltest cf
374 * @opflmodify cf,pf,af,zf,sf,of
375 * @opcopytests iemOp_adc_Eb_Gb
376 */
377FNIEMOP_DEF(iemOp_adc_Gb_Eb)
378{
379 IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
380 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
381}
382
383
384/**
385 * @opcode 0x13
386 * @opgroup og_gen_arith_bin
387 * @opfltest cf
388 * @opflmodify cf,pf,af,zf,sf,of
389 * @opcopytests iemOp_adc_Ev_Gv
390 */
391FNIEMOP_DEF(iemOp_adc_Gv_Ev)
392{
393 IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
394 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
395}
396
397
398/**
399 * @opcode 0x14
400 * @opgroup og_gen_arith_bin
401 * @opfltest cf
402 * @opflmodify cf,pf,af,zf,sf,of
403 * @opcopytests iemOp_adc_Eb_Gb
404 */
405FNIEMOP_DEF(iemOp_adc_Al_Ib)
406{
407 IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
408 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
409}
410
411
412/**
413 * @opcode 0x15
414 * @opgroup og_gen_arith_bin
415 * @opfltest cf
416 * @opflmodify cf,pf,af,zf,sf,of
417 * @opcopytests iemOp_adc_Ev_Gv
418 */
419FNIEMOP_DEF(iemOp_adc_eAX_Iz)
420{
421 IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
422 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
423}
424
425
426/**
427 * @opcode 0x16
428 */
429FNIEMOP_DEF(iemOp_push_SS)
430{
431 IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
432 IEMOP_HLP_NO_64BIT();
433 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
434}
435
436
437/**
438 * @opcode 0x17
439 * @opgroup og_stack_sreg
442 */
443FNIEMOP_DEF(iemOp_pop_SS)
444{
445 IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
447 IEMOP_HLP_NO_64BIT();
448 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
449}
450
451
452/**
453 * @opcode 0x18
454 * @opgroup og_gen_arith_bin
455 * @opfltest cf
456 * @opflmodify cf,pf,af,zf,sf,of
457 */
458FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
459{
460 IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
461 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
462}
463
464
465/**
466 * @opcode 0x19
467 * @opgroup og_gen_arith_bin
468 * @opfltest cf
469 * @opflmodify cf,pf,af,zf,sf,of
470 */
471FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
472{
473 IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
474 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
475}
476
477
478/**
479 * @opcode 0x1a
480 * @opgroup og_gen_arith_bin
481 * @opfltest cf
482 * @opflmodify cf,pf,af,zf,sf,of
483 */
484FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
485{
486 IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
487 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
488}
489
490
491/**
492 * @opcode 0x1b
493 * @opgroup og_gen_arith_bin
494 * @opfltest cf
495 * @opflmodify cf,pf,af,zf,sf,of
496 */
497FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
498{
499 IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
500 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
501}
502
503
504/**
505 * @opcode 0x1c
506 * @opgroup og_gen_arith_bin
507 * @opfltest cf
508 * @opflmodify cf,pf,af,zf,sf,of
509 */
510FNIEMOP_DEF(iemOp_sbb_Al_Ib)
511{
512 IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
513 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
514}
515
516
517/**
518 * @opcode 0x1d
519 * @opgroup og_gen_arith_bin
520 * @opfltest cf
521 * @opflmodify cf,pf,af,zf,sf,of
522 */
523FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
524{
525 IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
526 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
527}
528
529
530/**
531 * @opcode 0x1e
532 * @opgroup og_stack_sreg
533 */
534FNIEMOP_DEF(iemOp_push_DS)
535{
536 IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
537 IEMOP_HLP_NO_64BIT();
538 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
539}
540
541
542/**
543 * @opcode 0x1f
544 * @opgroup og_stack_sreg
545 */
546FNIEMOP_DEF(iemOp_pop_DS)
547{
548 IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
550 IEMOP_HLP_NO_64BIT();
551 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
552}
553
554
555/**
556 * @opcode 0x20
557 * @opgroup og_gen_arith_bin
558 * @opflmodify cf,pf,af,zf,sf,of
559 * @opflundef af
560 * @opflclear of,cf
561 */
562FNIEMOP_DEF(iemOp_and_Eb_Gb)
563{
564 IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
565 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
566 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
567}
568
569
570/**
571 * @opcode 0x21
572 * @opgroup og_gen_arith_bin
573 * @opflmodify cf,pf,af,zf,sf,of
574 * @opflundef af
575 * @opflclear of,cf
576 */
577FNIEMOP_DEF(iemOp_and_Ev_Gv)
578{
579 IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
580 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
581 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
582}
583
584
585/**
586 * @opcode 0x22
587 * @opgroup og_gen_arith_bin
588 * @opflmodify cf,pf,af,zf,sf,of
589 * @opflundef af
590 * @opflclear of,cf
591 */
592FNIEMOP_DEF(iemOp_and_Gb_Eb)
593{
594 IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
595 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
596 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
597}
598
599
600/**
601 * @opcode 0x23
602 * @opgroup og_gen_arith_bin
603 * @opflmodify cf,pf,af,zf,sf,of
604 * @opflundef af
605 * @opflclear of,cf
606 */
607FNIEMOP_DEF(iemOp_and_Gv_Ev)
608{
609 IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
610 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
611 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
612}
613
614
615/**
616 * @opcode 0x24
617 * @opgroup og_gen_arith_bin
618 * @opflmodify cf,pf,af,zf,sf,of
619 * @opflundef af
620 * @opflclear of,cf
621 */
622FNIEMOP_DEF(iemOp_and_Al_Ib)
623{
624 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
625 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
626 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
627}
628
629
630/**
631 * @opcode 0x25
632 * @opgroup og_gen_arith_bin
633 * @opflmodify cf,pf,af,zf,sf,of
634 * @opflundef af
635 * @opflclear of,cf
636 */
637FNIEMOP_DEF(iemOp_and_eAX_Iz)
638{
639 IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
640 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
641 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
642}
643
644
645/**
646 * @opcode 0x26
647 * @opmnemonic SEG
648 * @op1 ES
649 * @opgroup og_prefix
650 * @openc prefix
651 * @opdisenum OP_SEG
652 * @ophints harmless
653 */
654FNIEMOP_DEF(iemOp_seg_ES)
655{
656 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
657 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
658 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
659
660 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
661 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
662}
663
664
665/**
666 * @opcode 0x27
667 * @opfltest af,cf
668 * @opflmodify cf,pf,af,zf,sf,of
669 * @opflundef of
670 */
671FNIEMOP_DEF(iemOp_daa)
672{
673 IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
674 IEMOP_HLP_NO_64BIT();
675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
676 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
677 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
678}
679
680
681/**
682 * @opcode 0x28
683 * @opgroup og_gen_arith_bin
684 * @opflmodify cf,pf,af,zf,sf,of
685 */
686FNIEMOP_DEF(iemOp_sub_Eb_Gb)
687{
688 IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
689 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
690}
691
692
693/**
694 * @opcode 0x29
695 * @opgroup og_gen_arith_bin
696 * @opflmodify cf,pf,af,zf,sf,of
697 */
698FNIEMOP_DEF(iemOp_sub_Ev_Gv)
699{
700 IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
701 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
702}
703
704
705/**
706 * @opcode 0x2a
707 * @opgroup og_gen_arith_bin
708 * @opflmodify cf,pf,af,zf,sf,of
709 */
710FNIEMOP_DEF(iemOp_sub_Gb_Eb)
711{
712 IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
713 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
714}
715
716
717/**
718 * @opcode 0x2b
719 * @opgroup og_gen_arith_bin
720 * @opflmodify cf,pf,af,zf,sf,of
721 */
722FNIEMOP_DEF(iemOp_sub_Gv_Ev)
723{
724 IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
725 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
726}
727
728
729/**
730 * @opcode 0x2c
731 * @opgroup og_gen_arith_bin
732 * @opflmodify cf,pf,af,zf,sf,of
733 */
734FNIEMOP_DEF(iemOp_sub_Al_Ib)
735{
736 IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
737 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
738}
739
740
741/**
742 * @opcode 0x2d
743 * @opgroup og_gen_arith_bin
744 * @opflmodify cf,pf,af,zf,sf,of
745 */
746FNIEMOP_DEF(iemOp_sub_eAX_Iz)
747{
748 IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
749 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
750}
751
752
753/**
754 * @opcode 0x2e
755 * @opmnemonic SEG
756 * @op1 CS
757 * @opgroup og_prefix
758 * @openc prefix
759 * @opdisenum OP_SEG
760 * @ophints harmless
761 */
762FNIEMOP_DEF(iemOp_seg_CS)
763{
764 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
765 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
766 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
767
768 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
769 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
770}
771
772
773/**
774 * @opcode 0x2f
775 * @opfltest af,cf
776 * @opflmodify cf,pf,af,zf,sf,of
777 * @opflundef of
778 */
779FNIEMOP_DEF(iemOp_das)
780{
781 IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
782 IEMOP_HLP_NO_64BIT();
783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
784 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
785 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
786}
787
788
789/**
790 * @opcode 0x30
791 * @opgroup og_gen_arith_bin
792 * @opflmodify cf,pf,af,zf,sf,of
793 * @opflundef af
794 * @opflclear of,cf
795 */
796FNIEMOP_DEF(iemOp_xor_Eb_Gb)
797{
798 IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
799 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
800 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
801}
802
803
804/**
805 * @opcode 0x31
806 * @opgroup og_gen_arith_bin
807 * @opflmodify cf,pf,af,zf,sf,of
808 * @opflundef af
809 * @opflclear of,cf
810 */
811FNIEMOP_DEF(iemOp_xor_Ev_Gv)
812{
813 IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
814 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
815 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
816}
817
818
819/**
820 * @opcode 0x32
821 * @opgroup og_gen_arith_bin
822 * @opflmodify cf,pf,af,zf,sf,of
823 * @opflundef af
824 * @opflclear of,cf
825 */
826FNIEMOP_DEF(iemOp_xor_Gb_Eb)
827{
828 IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
829 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
830 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
831}
832
833
834/**
835 * @opcode 0x33
836 * @opgroup og_gen_arith_bin
837 * @opflmodify cf,pf,af,zf,sf,of
838 * @opflundef af
839 * @opflclear of,cf
840 */
841FNIEMOP_DEF(iemOp_xor_Gv_Ev)
842{
843 IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
844 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
845 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
846}
847
848
849/**
850 * @opcode 0x34
851 * @opgroup og_gen_arith_bin
852 * @opflmodify cf,pf,af,zf,sf,of
853 * @opflundef af
854 * @opflclear of,cf
855 */
856FNIEMOP_DEF(iemOp_xor_Al_Ib)
857{
858 IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
859 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
860 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
861}
862
863
864/**
865 * @opcode 0x35
866 * @opgroup og_gen_arith_bin
867 * @opflmodify cf,pf,af,zf,sf,of
868 * @opflundef af
869 * @opflclear of,cf
870 */
871FNIEMOP_DEF(iemOp_xor_eAX_Iz)
872{
873 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
874 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
875 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
876}
877
878
879/**
880 * @opcode 0x36
881 * @opmnemonic SEG
882 * @op1 SS
883 * @opgroup og_prefix
884 * @openc prefix
885 * @opdisenum OP_SEG
886 * @ophints harmless
887 */
888FNIEMOP_DEF(iemOp_seg_SS)
889{
890 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
891 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
892 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
893
894 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
895 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
896}
897
898
899/**
900 * @opcode 0x37
901 * @opfltest af,cf
902 * @opflmodify cf,pf,af,zf,sf,of
903 * @opflundef pf,zf,sf,of
904 * @opgroup og_gen_arith_dec
905 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
906 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
907 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
908 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
909 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
910 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
911 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
912 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
913 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
914 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
915 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
916 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
917 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
918 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
919 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
920 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
921 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
922 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
923 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
924 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
925 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
926 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
927 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
928 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
929 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
930 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
931 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
932 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
933 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
934 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
935 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
936 */
937FNIEMOP_DEF(iemOp_aaa)
938{
939 IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
940 IEMOP_HLP_NO_64BIT();
941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
942 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
943
944 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
945}
946
947
948/**
949 * @opcode 0x38
950 */
951FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
952{
953 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
954 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
955}
956
957
958/**
959 * @opcode 0x39
960 */
961FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
962{
963 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
964 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
965}
966
967
968/**
969 * @opcode 0x3a
970 */
971FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
972{
973 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
974 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
975}
976
977
978/**
979 * @opcode 0x3b
980 */
981FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
982{
983 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
984 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
985}
986
987
988/**
989 * @opcode 0x3c
990 */
991FNIEMOP_DEF(iemOp_cmp_Al_Ib)
992{
993 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
994 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
995}
996
997
998/**
999 * @opcode 0x3d
1000 */
1001FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
1002{
1003 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
1004 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
1005}
1006
1007
1008/**
1009 * @opcode 0x3e
1010 */
1011FNIEMOP_DEF(iemOp_seg_DS)
1012{
1013 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
1014 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
1015 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
1016
1017 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1018 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1019}
1020
1021
1022/**
1023 * @opcode 0x3f
1024 * @opfltest af,cf
1025 * @opflmodify cf,pf,af,zf,sf,of
1026 * @opflundef pf,zf,sf,of
1027 * @opgroup og_gen_arith_dec
1028 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1029 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1030 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1031 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1032 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1033 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1034 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1035 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1036 * @optest8 amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1037 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1038 * @optest10 amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1039 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1040 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1041 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1042 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1043 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1044 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1045 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1046 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1047 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1048 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1049 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1050 * @optest22 amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1051 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1052 * @optest24 amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1053 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1054 * @optest26 amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1055 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1056 * @optest28 amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1057 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1058 * @optest30 amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1059 * @optest31 intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1060 * @optest32 amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1061 * @optest33 intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1062 * @optest34 amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1063 */
1064FNIEMOP_DEF(iemOp_aas)
1065{
1066 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
1067 IEMOP_HLP_NO_64BIT();
1068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1069 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1070
1071 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
1072}
1073
1074
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Invokes the size-appropriate worker from @a pImpl on general register
 * @a iReg, passing EFLAGS by reference so the worker can update them.
 *
 * @param   pImpl   Function table with U16/U32/U64 variants of the operation.
 * @param   iReg    The general register index (X86_GREG_XXX).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit GPR writes clear the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reached for valid enmEffOpSize values; keeps the compiler happy. */
    return VINF_SUCCESS;
}
1119
1120
1121/**
1122 * @opcode 0x40
1123 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX; /* plain REX: no W/R/X/B bits */

        /* Fetch the next byte and continue decoding. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
1141
1142
1143/**
1144 * @opcode 0x41
1145 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3; /* REX.B supplies bit 3 of the base/rm register index */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
1164
1165
1166/**
1167 * @opcode 0x42
1168 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X supplies bit 3 of the SIB index register */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
1187
1188
1189
1190/**
1191 * @opcode 0x43
1192 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B: bit 3 of base/rm register */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: bit 3 of SIB index register */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
1212
1213
1214/**
1215 * @opcode 0x44
1216 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R supplies bit 3 of the ModRM reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
1235
1236
1237/**
1238 * @opcode 0x45
1239 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R: bit 3 of ModRM reg field */
        pVCpu->iem.s.uRexB   = 1 << 3; /* REX.B: bit 3 of base/rm register */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
1259
1260
1261/**
1262 * @opcode 0x46
1263 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R: bit 3 of ModRM reg field */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: bit 3 of SIB index register */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
1283
1284
1285/**
1286 * @opcode 0x47
1287 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R: bit 3 of ModRM reg field */
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B: bit 3 of base/rm register */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: bit 3 of SIB index register */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
1308
1309
1310/**
1311 * @opcode 0x48
1312 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
1331
1332
1333/**
1334 * @opcode 0x49
1335 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;  /* REX.B: bit 3 of base/rm register */
        iemRecalEffOpSize(pVCpu);     /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
1355
1356
1357/**
1358 * @opcode 0x4a
1359 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: bit 3 of SIB index register */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
1379
1380
1381/**
1382 * @opcode 0x4b
1383 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B: bit 3 of base/rm register */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: bit 3 of SIB index register */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
1404
1405
1406/**
1407 * @opcode 0x4c
1408 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R: bit 3 of ModRM reg field */
        iemRecalEffOpSize(pVCpu);      /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
1428
1429
1430/**
1431 * @opcode 0x4d
1432 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R: bit 3 of ModRM reg field */
        pVCpu->iem.s.uRexB   = 1 << 3; /* REX.B: bit 3 of base/rm register */
        iemRecalEffOpSize(pVCpu);      /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
1453
1454
1455/**
1456 * @opcode 0x4e
1457 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R: bit 3 of ModRM reg field */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: bit 3 of SIB index register */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
1478
1479
1480/**
1481 * @opcode 0x4f
1482 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R: bit 3 of ModRM reg field */
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B: bit 3 of base/rm register */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: bit 3 of SIB index register */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
1504
1505
/**
 * Common 'push register' helper.
 *
 * Fetches general register @a iReg at the effective operand size and pushes
 * the value onto the stack.  In 64-bit mode the register index is extended by
 * REX.B, the default operand size is 64-bit, and a 0x66 prefix selects
 * 16-bit (there is no 32-bit push in 64-bit mode).
 *
 * @param   iReg    The general register index (X86_GREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1551
1552
1553/**
1554 * @opcode 0x50
1555 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    /* PUSH rAX: defer to the common push-register worker. */
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
1561
1562
1563/**
1564 * @opcode 0x51
1565 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    /* PUSH rCX: defer to the common push-register worker. */
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
1571
1572
1573/**
1574 * @opcode 0x52
1575 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    /* PUSH rDX: defer to the common push-register worker. */
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
1581
1582
1583/**
1584 * @opcode 0x53
1585 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    /* PUSH rBX: defer to the common push-register worker. */
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1591
1592
1593/**
1594 * @opcode 0x54
1595 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    /* The 8086 pushes the value of SP *after* it was decremented for the push,
       unlike later CPUs which push the pre-push value; emulate that here. */
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        /* NOTE(review): this path relies on the IEM_MC_ADVANCE_RIP() macro
           returning from the function so the common-push call below is not
           also executed — confirm against the macro definition. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
1611
1612
1613/**
1614 * @opcode 0x55
1615 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    /* PUSH rBP: defer to the common push-register worker. */
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
1621
1622
1623/**
1624 * @opcode 0x56
1625 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    /* PUSH rSI: defer to the common push-register worker. */
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
1631
1632
1633/**
1634 * @opcode 0x57
1635 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    /* PUSH rDI: defer to the common push-register worker. */
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1641
1642
/**
 * Common 'pop register' helper.
 *
 * Pops a value of the effective operand size off the stack directly into
 * general register @a iReg (by reference).  In 64-bit mode the register index
 * is extended by REX.B, the default operand size is 64-bit, and a 0x66 prefix
 * selects 16-bit (there is no 32-bit pop in 64-bit mode).
 *
 * @param   iReg    The general register index (X86_GREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1689
1690
1691/**
1692 * @opcode 0x58
1693 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    /* POP rAX: defer to the common pop-register worker. */
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
1699
1700
1701/**
1702 * @opcode 0x59
1703 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    /* POP rCX: defer to the common pop-register worker. */
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
1709
1710
1711/**
1712 * @opcode 0x5a
1713 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    /* POP rDX: defer to the common pop-register worker. */
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
1719
1720
1721/**
1722 * @opcode 0x5b
1723 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    /* POP rBX: defer to the common pop-register worker. */
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
1729
1730
1731/**
1732 * @opcode 0x5c
1733 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this is pop r12, which has no special SP semantics. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    /* POP SP is special cased: pop into a local first, then store it into
       SP/ESP/RSP, rather than popping through a register reference. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1780
1781
1782/**
1783 * @opcode 0x5d
1784 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    /* POP rBP: defer to the common pop-register worker. */
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1790
1791
1792/**
1793 * @opcode 0x5e
1794 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    /* POP rSI: defer to the common pop-register worker. */
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1800
1801
1802/**
1803 * @opcode 0x5f
1804 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    /* POP rDI: defer to the common pop-register worker. */
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1810
1811
1812/**
1813 * @opcode 0x60
1814 */
FNIEMOP_DEF(iemOp_pusha)
{
    /* PUSHA/PUSHAD: push all eight general registers; invalid in 64-bit mode. */
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1825
1826
1827/**
1828 * @opcode 0x61
1829 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* Outside 64-bit mode this is POPA/POPAD; in 64-bit mode 0x61 is the
       (unsupported) MVEX prefix. */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1846
1847
1848/**
1849 * @opcode 0x62
1850 * @opmnemonic bound
1851 * @op1 Gv_RO
1852 * @op2 Ma
1853 * @opmincpu 80186
1854 * @ophints harmless invalid_64
1855 * @optest op1=0 op2=0 ->
1856 * @optest op1=1 op2=0 -> value.xcpt=5
1857 * @optest o16 / op1=0xffff op2=0x0000fffe ->
1858 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
1859 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
1860 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
1861 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
1862 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
1863 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
1864 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
1865 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
1866 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
1867 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
1868 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
1869 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
1870 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
1871 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
1872 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
1873 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
1874 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
1875 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
1876 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
1877 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
1878 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
1879 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
1880 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
1881 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
1882 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
1883 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
1884 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
1885 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
1886 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
1887 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
1888 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
1889 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
1890 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
1891 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
1892 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
1893 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
1894 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
1895 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
1896 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
1897 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
1898 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
1899 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at [mem], upper bound at [mem+2]. */
                IEM_MC_FETCH_GREG_U16(u16Index, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at [mem], upper bound at [mem+4]. */
                IEM_MC_FETCH_GREG_U32(u32Index, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD=3 in legacy/compat mode: EVEX candidate, but only if AVX-512 is exposed. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX prefix: consume the remaining two payload bytes, then give up. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1987
1988
1989/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    /* ARPL Ew,Gw: adjust the RPL field of the destination selector word.
       Protected mode only (286+); 0x63 means MOVSXD in 64-bit mode instead. */
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory: read-modify-write of the destination selector word. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
2038
2039
2040/**
2041 * @opcode 0x63
2042 *
2043 * @note This is a weird one. It works like a regular move instruction if
2044 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2045 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    /* MOVSXD Gv,Ev: sign-extend a 32-bit source into a 64-bit register. */
    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2083
2084
2085/**
2086 * @opcode 0x64
2087 * @opmnemonic segfs
2088 * @opmincpu 80386
2089 * @opgroup og_prefixes
2090 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    /* Record the FS segment-override prefix and make FS the effective segment. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;

    /* Fetch the next byte and continue decoding with it. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2102
2103
2104/**
2105 * @opcode 0x65
2106 * @opmnemonic seggs
2107 * @opmincpu 80386
2108 * @opgroup og_prefixes
2109 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    /* GS segment-override prefix: record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2121
2122
2123/**
2124 * @opcode 0x66
2125 * @opmnemonic opsize
2126 * @openc prefix
2127 * @opmincpu 80386
2128 * @ophints harmless
2129 * @opgroup og_prefixes
2130 */
FNIEMOP_DEF(iemOp_op_size)
{
    /* Operand-size override prefix (0x66): flag it, recalc the effective
       operand size, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2147
2148
2149/**
2150 * @opcode 0x67
2151 * @opmnemonic addrsize
2152 * @openc prefix
2153 * @opmincpu 80386
2154 * @ophints harmless
2155 * @opgroup og_prefixes
2156 */
FNIEMOP_DEF(iemOp_addr_size)
{
    /* Address-size override prefix (0x67): toggle the effective address mode
       relative to the default mode, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2174
2175
2176/**
2177 * @opcode 0x68
2178 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    /* push imm16/imm32/imm32-sign-extended-to-64, by effective operand size. */
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit mode still only has a 32-bit immediate; sign-extend it. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2222
2223
2224/**
2225 * @opcode 0x69
2226 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply in a local and store to the Gv register afterwards. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Note: effective address is calculated before fetching the
                   trailing 2-byte immediate (hence the cbImm=2 argument). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: a 4-byte immediate follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                /* The 64-bit form takes a 32-bit immediate, sign-extended. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: the imm32 (sign-extended to 64) follows ModR/M. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
2385
2386
2387/**
2388 * @opcode 0x6a
2389 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    /* push imm8, sign-extended to the effective operand size. */
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2415
2416
2417/**
2418 * @opcode 0x6b
2419 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* The (int8_t) cast sign-extends the imm8 to the operand size. */
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: a single imm8 byte follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
2572
2573
2574/**
2575 * @opcode 0x6c
2576 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    /* ins/rep ins byte form: deferred to C implementations selected by
       address mode; REP/REPNE prefixes both select the repeating variant. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2604
2605
2606/**
2607 * @opcode 0x6d
2608 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    /* ins/rep ins word/dword form: deferred to C implementations selected
       by operand size and address mode.  64-bit operand size uses the
       32-bit op implementations (there is no 64-bit ins). */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2668
2669
2670/**
2671 * @opcode 0x6e
2672 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    /* outs/rep outs byte form: deferred to C implementations; the source
       segment (which honours segment-override prefixes) is passed along. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2700
2701
2702/**
2703 * @opcode 0x6f
2704 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    /* outs/rep outs word/dword form: deferred to C implementations selected
       by operand size and address mode.  64-bit operand size uses the
       32-bit op implementations (there is no 64-bit outs). */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2764
2765
2766/**
2767 * @opcode 0x70
2768 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Short relative jump taken when OF=1, else fall through. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2785
2786
2787/**
2788 * @opcode 0x71
2789 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Short relative jump taken when OF=0 (branches inverted vs. jo). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2806
2807/**
2808 * @opcode 0x72
2809 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Short relative jump taken when CF=1 (jc/jb/jnae), else fall through. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2826
2827
2828/**
2829 * @opcode 0x73
2830 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Short relative jump taken when CF=0 (jnc/jnb/jae). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2847
2848
2849/**
2850 * @opcode 0x74
2851 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Short relative jump taken when ZF=1, else fall through. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2868
2869
2870/**
2871 * @opcode 0x75
2872 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Short relative jump taken when ZF=0. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2889
2890
2891/**
2892 * @opcode 0x76
2893 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Short relative jump taken when CF=1 or ZF=1 (unsigned below-or-equal). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2910
2911
2912/**
2913 * @opcode 0x77
2914 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Short relative jump taken when CF=0 and ZF=0 (unsigned above). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2931
2932
2933/**
2934 * @opcode 0x78
2935 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Short relative jump taken when SF=1. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2952
2953
2954/**
2955 * @opcode 0x79
2956 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Short relative jump taken when SF=0. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2973
2974
2975/**
2976 * @opcode 0x7a
2977 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Short relative jump taken when PF=1. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2994
2995
2996/**
2997 * @opcode 0x7b
2998 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Short relative jump taken when PF=0. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3015
3016
3017/**
3018 * @opcode 0x7c
3019 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Short relative jump taken when SF != OF (signed less-than). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3036
3037
3038/**
3039 * @opcode 0x7d
3040 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Short relative jump taken when SF == OF (signed greater-or-equal). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3057
3058
3059/**
3060 * @opcode 0x7e
3061 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Short relative jump taken when ZF=1 or SF != OF (signed less-or-equal). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3078
3079
3080/**
3081 * @opcode 0x7f
3082 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Short relative jump taken when ZF=0 and SF == OF (signed greater-than). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3099
3100
3101/**
3102 * @opcode 0x80
3103 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /* Group 1 byte ops (add/or/adc/sbb/and/sub/xor/cmp Eb,Ib); the actual
       operation is selected by the ModR/M reg field via g_apIemImplGrp1. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* cmp has no locked variant (pfnLockedU8 is NULL) and only reads
           the destination, so map it read-only and reject LOCK. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3172
3173
/**
 * @opcode 0x81
 *
 * Group 1 word/dword/qword operations with a full-size immediate (Iz):
 * add/or/adc/sbb/and/sub/xor/cmp Ev,Iz.  In 64-bit operand size the
 * immediate is 32 bits, sign-extended to 64 bits.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field only selects the mnemonic here (stats/logging). */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    /* The same reg field indexes the table of binary-op workers. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK requires a memory destination */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP - no locked worker, destination is read-only. */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=2: a word immediate follows the displacement. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP - no locked worker, destination is read-only. */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: a dword immediate follows the displacement. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* Iz in 64-bit mode: 32-bit immediate sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: the sign-extended immediate occupies 4 opcode bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
3363
3364
/**
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup og_groups
 *
 * Alias of opcode 0x80 (Grp1 Eb,Ib) that is invalid in 64-bit mode
 * (IEMOP_HLP_NO_64BIT raises \#UD there); otherwise forwarded unchanged.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
3375
3376
/**
 * @opcode 0x83
 *
 * Group 1 word/dword/qword operations with a sign-extended byte immediate:
 * add/or/adc/sbb/and/sub/xor/cmp Ev,Ib.  The byte is sign-extended to the
 * effective operand size before the worker is invoked.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field only selects the mnemonic here (stats/logging). */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK requires a memory destination */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* (int8_t) cast: sign-extend the immediate to operand size. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* All size variants share lock semantics, so checking the U16
           worker is sufficient to detect CMP (read-only destination). */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: one immediate byte follows the displacement. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
3561
3562
/**
 * @opcode 0x84
 *
 * TEST Eb,Gb - byte AND that only updates EFLAGS; implemented via the
 * generic rm,r8 binary-operator helper with the TEST worker table.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
3572
3573
/**
 * @opcode 0x85
 *
 * TEST Ev,Gv - word/dword/qword AND that only updates EFLAGS; implemented
 * via the generic rm,rv binary-operator helper with the TEST worker table.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
3583
3584
/**
 * @opcode 0x86
 *
 * XCHG Eb,Gb - swap a byte register with another register or memory.
 * The register form is done with two fetches and two stores; the memory
 * form maps the memory byte RW and calls the xchg worker.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Fetch both, then store each into the other's register. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3634
3635
/**
 * @opcode 0x87
 *
 * XCHG Ev,Gv - swap a word/dword/qword register with another register or
 * memory, switching on the effective operand size.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                /* Fetch both, then cross-store. */
                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* Zero bits 63:32 of the register after the 32-bit write. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3759
3760
/**
 * @opcode 0x88
 *
 * MOV Eb,Gb - store a byte register into another register or memory.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
3802
3803
/**
 * @opcode 0x89
 *
 * MOV Ev,Gv - store a word/dword/qword register into another register or
 * memory, switching on the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3895
3896
/**
 * @opcode 0x8a
 *
 * MOV Gb,Eb - load a byte register from another register or memory.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3936
3937
/**
 * @opcode 0x8b
 *
 * MOV Gv,Ev - load a word/dword/qword register from another register or
 * memory, switching on the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
4029
4030
/**
 * opcode 0x63
 * @todo Table fixme
 *
 * Mode-dependent dispatcher for opcode 0x63: ARPL Ew,Gw outside 64-bit
 * mode; in 64-bit mode it is MOVSXD Gv,Ev when the effective operand size
 * is 64-bit, otherwise a plain MOV Gv,Ev.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
4043
4044
/**
 * @opcode 0x8c
 *
 * MOV Ev,Sw - store a segment register into a general register or memory.
 * Raises \#UD for segment register encodings above GS.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if ( iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                /* ZX: the 16-bit selector is zero-extended to operand size. */
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4120
4121
4122
4123
/**
 * @opcode 0x8d
 *
 * LEA Gv,M - store the effective address of the memory operand in a
 * general register; the register form is invalid (\#UD).  The address is
 * truncated to the effective operand size for 16/32-bit.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Truncate the effective address to the 16-bit operand size. */
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Truncate the effective address to the 32-bit operand size. */
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
4172
4173
/**
 * @opcode 0x8e
 *
 * MOV Sw,Ev - load a segment register from a general register or from
 * memory.  The actual segment loading (descriptor checks etc.) is done by
 * the iemCImpl_load_SReg C implementation.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.  Loading CS this way is
     * invalid, as are the reserved encodings above GS.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4230
4231
/** Opcode 0x8f /0.
 *
 * POP Ev - pop a value off the stack into a general register or into memory.
 * The register form shares iemOpCommonPopGReg; the memory form is handled
 * inline here because RSP must be updated before the effective address of
 * the destination is calculated.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR      GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        /* The last argument tells the helper how much RSP is adjusted by. */
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop into a temporary and only commit RSP once the store succeeded. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4325
4326
4327/**
4328 * @opcode 0x8f
4329 */
4330FNIEMOP_DEF(iemOp_Grp1A__xop)
4331{
4332 /*
4333 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
4334 * three byte VEX prefix, except that the mmmmm field cannot have the values
4335 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
4336 */
4337 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4338 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
4339 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
4340
4341 IEMOP_MNEMONIC(xop, "xop");
4342 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
4343 {
4344 /** @todo Test when exctly the XOP conformance checks kick in during
4345 * instruction decoding and fetching (using \#PF). */
4346 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
4347 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
4348 if ( ( pVCpu->iem.s.fPrefixes
4349 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
4350 == 0)
4351 {
4352 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
4353 if ((bXop2 & 0x80 /* XOP.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
4354 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
4355 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
4356 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
4357 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
4358 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
4359 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
4360 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
4361
4362 /** @todo XOP: Just use new tables and decoders. */
4363 switch (bRm & 0x1f)
4364 {
4365 case 8: /* xop opcode map 8. */
4366 IEMOP_BITCH_ABOUT_STUB();
4367 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4368
4369 case 9: /* xop opcode map 9. */
4370 IEMOP_BITCH_ABOUT_STUB();
4371 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4372
4373 case 10: /* xop opcode map 10. */
4374 IEMOP_BITCH_ABOUT_STUB();
4375 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4376
4377 default:
4378 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
4379 return IEMOP_RAISE_INVALID_OPCODE();
4380 }
4381 }
4382 else
4383 Log(("XOP: Invalid prefix mix!\n"));
4384 }
4385 else
4386 Log(("XOP: XOP support disabled!\n"));
4387 return IEMOP_RAISE_INVALID_OPCODE();
4388}
4389
4390
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Swaps the general purpose register selected by @a iReg (extended with the
 * REX.B bit) with rAX/eAX/ax according to the effective operand size.  No
 * EFLAGS are modified.
 *
 * @param   iReg    The low 3 bits of the register index; REX.B is ORed in here.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            /* Fetch both operands before storing either, so the swap is atomic
               with respect to the guest register context. */
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg,         u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg,         u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg,         u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4440
4441
4442/**
4443 * @opcode 0x90
4444 */
4445FNIEMOP_DEF(iemOp_nop)
4446{
4447 /* R8/R8D and RAX/EAX can be exchanged. */
4448 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4449 {
4450 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4451 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4452 }
4453
4454 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4455 {
4456 IEMOP_MNEMONIC(pause, "pause");
4457#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
4458 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSvm)
4459 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_svm_pause);
4460#endif
4461 }
4462 else
4463 IEMOP_MNEMONIC(nop, "nop");
4464 IEM_MC_BEGIN(0, 0);
4465 IEM_MC_ADVANCE_RIP();
4466 IEM_MC_END();
4467 return VINF_SUCCESS;
4468}
4469
4470
/**
 * @opcode 0x91
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    /* Delegates to the common xchg reg,rAX helper with the rCX index. */
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
4479
4480
/**
 * @opcode 0x92
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    /* Delegates to the common xchg reg,rAX helper with the rDX index. */
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
4489
4490
/**
 * @opcode 0x93
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    /* Delegates to the common xchg reg,rAX helper with the rBX index. */
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
4499
4500
4501/**
4502 * @opcode 0x94
4503 */
4504FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4505{
4506 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4507 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4508}
4509
4510
/**
 * @opcode 0x95
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    /* Delegates to the common xchg reg,rAX helper with the rBP index. */
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
4519
4520
/**
 * @opcode 0x96
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    /* Delegates to the common xchg reg,rAX helper with the rSI index. */
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
4529
4530
/**
 * @opcode 0x97
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    /* Delegates to the common xchg reg,rAX helper with the rDI index. */
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
4539
4540
/**
 * @opcode 0x98
 *
 * CBW/CWDE/CDQE - sign extend the lower half of rAX into the full operand
 * size.  Implemented by testing the sign bit of the source half and then
 * either setting or clearing the upper half.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* cbw: AL -> AX. */
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            /* cwde: AX -> EAX. */
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* cdqe: EAX -> RAX. */
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4588
4589
/**
 * @opcode 0x99
 *
 * CWD/CDQ/CQO - sign extend rAX into rDX:rAX by filling rDX with the sign
 * bit of rAX (all ones or all zeros).
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* cwd: AX -> DX:AX. */
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            /* cdq: EAX -> EDX:EAX. */
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* cqo: RAX -> RDX:RAX. */
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4637
4638
/**
 * @opcode 0x9a
 *
 * CALL Ap - direct far call with an immediate ptr16:16 / ptr16:32 operand.
 * Invalid in 64-bit mode; the heavy lifting (stack, CS loading, privilege
 * checks) is done by iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    /* The selector follows the offset in the instruction stream. */
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
4657
4658
/** Opcode 0x9b. (aka fwait)
 *
 * WAIT/FWAIT - check for pending x87 FPU exceptions; may raise \#NM (per
 * CR0.MP/TS) or an FPU exception, otherwise a no-op.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4672
4673
/**
 * @opcode 0x9c
 *
 * PUSHF - push the flags register; deferred to the C implementation which
 * handles the mode/IOPL specific details.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* stack ops default to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
4684
4685
/**
 * @opcode 0x9d
 *
 * POPF - pop into the flags register; deferred to the C implementation which
 * handles IOPL/VM86 masking of individual flag bits.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* stack ops default to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
4696
4697
/**
 * @opcode 0x9e
 *
 * SAHF - store AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF).  In
 * 64-bit mode it is only valid when CPUID reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* Register index 4 with an 8-bit access and no REX selects AH. */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the five flags SAHF may set, merge into the low byte. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1); /* reserved bit 1 always reads as 1 */
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4722
4723
/**
 * @opcode 0x9f
 *
 * LAHF - load AH from the low byte of EFLAGS.  In 64-bit mode it is only
 * valid when CPUID reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* Register index 4 with an 8-bit access and no REX selects AH. */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4742
4743
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
 * prefixes.  Will return on failures.
 *
 * The width of the fetched offset follows the effective address size and
 * the result is zero extended to 64 bits.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
4768
/**
 * @opcode 0xa0
 *
 * MOV AL,Ob - load AL from the memory byte at the immediate offset (moffs8)
 * relative to the effective segment.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4792
4793
/**
 * @opcode 0xa1
 *
 * MOV rAX,Ov - load rAX/eAX/ax from memory at the immediate offset (moffs)
 * relative to the effective segment, width per the operand size.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4841
4842
/**
 * @opcode 0xa2
 *
 * MOV Ob,AL - store AL to the memory byte at the immediate offset (moffs8)
 * relative to the effective segment.
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4866
4867
/**
 * @opcode 0xa3
 *
 * MOV Ov,rAX - store rAX/eAX/ax to memory at the immediate offset (moffs)
 * relative to the effective segment, width per the operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4915
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one MOVS iteration: load ValBits bits from [iEffSeg:rSI], store to
 * [ES:rDI], then advance (or, with EFLAGS.DF set, retreat) rSI and rDI by
 * the operand size.  AddrBits selects how rSI/rDI are read/updated.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
4934
/**
 * @opcode 0xa4
 *
 * MOVSB - copy a byte from [iEffSeg:rSI] to [ES:rDI].  REP/REPNE forms are
 * handed off to the C implementation; the single-shot form shares
 * IEM_MOVS_CASE with movs[wdq].
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (F2 and F3 behave the same for movs - there is no condition to test.)
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4970
4971
/**
 * @opcode 0xa5
 *
 * MOVSW/MOVSD/MOVSQ - copy an operand-sized element from [iEffSeg:rSI] to
 * [ES:rDI].  REP forms defer to the C implementation selected by the
 * operand/address size pair; the single-shot form uses IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* op64/addr16 cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5056
5057#undef IEM_MOVS_CASE
5058
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one CMPS iteration: load ValBits bits from [iEffSeg:rSI] and
 * [ES:rDI], compare them via the cmp assembly worker (setting EFLAGS only,
 * discarding the subtraction result), then advance (or, with EFLAGS.DF set,
 * retreat) rSI and rDI by the operand size.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
5085
/**
 * @opcode 0xa6
 *
 * CMPSB - compare the byte at [iEffSeg:rSI] with the byte at [ES:rDI].
 * Unlike movs, the F3 (repe) and F2 (repne) prefixes differ here, so they
 * dispatch to distinct C implementations; the single-shot form shares
 * IEM_CMPS_CASE with cmps[wdq].
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
5133
5134
/**
 * @opcode 0xa7
 *
 * CMPSW/CMPSD/CMPSQ: compare the string element at iEffSeg:[rSI] with the
 * one at ES:[rDI], updating EFLAGS like CMP and stepping rSI/rDI by the
 * operand size (direction per EFL.DF).  REPE/REPNE forms are deferred to
 * the C implementations, selected by operand size x address size.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* (No break needed: every path above returns.) */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* (No break needed: every path above returns.) */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
5255
5256#undef IEM_CMPS_CASE
5257
/**
 * @opcode 0xa8
 *
 * TEST AL,Ib: ANDs AL with an immediate byte and updates EFLAGS only
 * (no destination write).  AF is architecturally undefined after TEST,
 * hence the verification hint below.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
5267
5268
/**
 * @opcode 0xa9
 *
 * TEST rAX,Iz: ANDs the operand-sized accumulator with an immediate and
 * updates EFLAGS only.  AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
5278
5279
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits a microcode block that stores the low ValBits bits of xAX at
 * ES:[xDI] and then steps xDI by ValBits/8 bytes - backwards when EFL.DF
 * is set - using AddrBits-wide register addressing.
 *
 * Note: dropped the stray line continuation that followed IEM_MC_END(),
 * which made the macro swallow the next source line (the blank one after
 * the definition); now consistent with IEM_LODS_CASE / IEM_SCAS_CASE. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5295
/**
 * @opcode 0xaa
 *
 * STOSB: store AL at ES:[rDI] and step rDI by one byte (direction per
 * EFL.DF).  The REP form is deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5331
5332
/**
 * @opcode 0xab
 *
 * STOSW/STOSD/STOSQ: store the operand-sized accumulator at ES:[rDI] and
 * step rDI (direction per EFL.DF).  The REP forms are deferred to the C
 * implementations, selected by operand size x address size.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* (No break needed: every path above returns.) */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9); /* cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5417
5418#undef IEM_STOS_CASE
5419
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits a microcode block that loads a ValBits-wide element from
 * iEffSeg:[xSI] into xAX and then steps xSI by ValBits/8 bytes - backwards
 * when EFL.DF is set - using AddrBits-wide register addressing. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5435
/**
 * @opcode 0xac
 *
 * LODSB: load AL from iEffSeg:[rSI] and step rSI by one byte (direction
 * per EFL.DF).  The REP form is deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5471
5472
/**
 * @opcode 0xad
 *
 * LODSW/LODSD/LODSQ: load the operand-sized accumulator from
 * iEffSeg:[rSI] and step rSI (direction per EFL.DF).  The REP forms are
 * deferred to the C implementations, selected by operand size x address
 * size.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* (No break needed: every path above returns.) */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5557
5558#undef IEM_LODS_CASE
5559
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits a microcode block that compares xAX against the ValBits-wide
 * element at ES:[xDI] (CMP semantics via iemAImpl_cmp_uNN, EFLAGS only)
 * and then steps xDI by ValBits/8 bytes - backwards when EFL.DF is set -
 * using AddrBits-wide register addressing. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax,   0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5581
5582/**
5583 * @opcode 0xae
5584 */
5585FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5586{
5587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5588
5589 /*
5590 * Use the C implementation if a repeat prefix is encountered.
5591 */
5592 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5593 {
5594 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5595 switch (pVCpu->iem.s.enmEffAddrMode)
5596 {
5597 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5598 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5599 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5600 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5601 }
5602 }
5603 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5604 {
5605 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5606 switch (pVCpu->iem.s.enmEffAddrMode)
5607 {
5608 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5609 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5610 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5611 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5612 }
5613 }
5614 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5615
5616 /*
5617 * Sharing case implementation with stos[wdq] below.
5618 */
5619 switch (pVCpu->iem.s.enmEffAddrMode)
5620 {
5621 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5622 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5623 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5624 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5625 }
5626 return VINF_SUCCESS;
5627}
5628
5629
/**
 * @opcode 0xaf
 *
 * SCASW/SCASD/SCASQ: compare the operand-sized accumulator with the
 * element at ES:[rDI], updating EFLAGS like CMP, and step rDI (direction
 * per EFL.DF).  REPE/REPNE forms are deferred to the C implementations,
 * selected by operand size x address size.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* (No break needed: every path above returns.) */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong?  In 64-bit mode the 67h prefix gives 32-bit addressing, not 16-bit, so asserting here looks right - verify. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* (No break needed: every path above returns.) */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5747
5748#undef IEM_SCAS_CASE
5749
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the immediate byte and stores it into the 8-bit register with
 * index @a iReg (the caller has already folded REX.B into the index).
 *
 * @param   iReg    The 8-bit general register index to store into.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
5766
5767
/**
 * @opcode 0xb0
 *
 * MOV AL,Ib.  REX.B retargets the destination to R8B (the uRexB bit is
 * OR'ed into the register index).
 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
5776
5777
/**
 * @opcode 0xb1
 *
 * MOV CL,Ib.  REX.B retargets the destination to R9B.
 * NOTE(review): unlike iemOp_mov_AL_Ib this function name lacks the mov_
 * prefix; kept as-is since the opcode table references it by this name.
 */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
5786
5787
/**
 * @opcode 0xb2
 *
 * MOV DL,Ib.  REX.B retargets the destination to R10B.
 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
5796
5797
/**
 * @opcode 0xb3
 *
 * MOV BL,Ib.  REX.B retargets the destination to R11B.
 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
5806
5807
/**
 * @opcode 0xb4
 *
 * MOV AH,Ib.  Register encoding 4 selects AH without a REX prefix and
 * SPL/R12B with one - hence X86_GREG_xSP below; the 8-bit GREG accessors
 * presumably perform the high-byte mapping (TODO confirm).
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
5816
5817
/**
 * @opcode 0xb5
 *
 * MOV CH,Ib.  Register encoding 5 selects CH without a REX prefix and
 * BPL/R13B with one - hence X86_GREG_xBP below.
 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
5826
5827
/**
 * @opcode 0xb6
 *
 * MOV DH,Ib.  Register encoding 6 selects DH without a REX prefix and
 * SIL/R14B with one - hence X86_GREG_xSI below.
 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
5836
5837
/**
 * @opcode 0xb7
 *
 * MOV BH,Ib.  Register encoding 7 selects BH without a REX prefix and
 * DIL/R15B with one - hence X86_GREG_xDI below.
 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
5846
5847
5848/**
5849 * Common 'mov regX,immX' helper.
5850 */
5851FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5852{
5853 switch (pVCpu->iem.s.enmEffOpSize)
5854 {
5855 case IEMMODE_16BIT:
5856 {
5857 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5859
5860 IEM_MC_BEGIN(0, 1);
5861 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5862 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5863 IEM_MC_ADVANCE_RIP();
5864 IEM_MC_END();
5865 break;
5866 }
5867
5868 case IEMMODE_32BIT:
5869 {
5870 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5872
5873 IEM_MC_BEGIN(0, 1);
5874 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5875 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5876 IEM_MC_ADVANCE_RIP();
5877 IEM_MC_END();
5878 break;
5879 }
5880 case IEMMODE_64BIT:
5881 {
5882 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5884
5885 IEM_MC_BEGIN(0, 1);
5886 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5887 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5888 IEM_MC_ADVANCE_RIP();
5889 IEM_MC_END();
5890 break;
5891 }
5892 }
5893
5894 return VINF_SUCCESS;
5895}
5896
5897
/**
 * @opcode 0xb8
 *
 * MOV rAX,Iv: loads an operand-sized immediate (a full 64-bit one with
 * REX.W) into rAX; REX.B retargets to r8.
 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
5906
5907
/**
 * @opcode 0xb9
 *
 * MOV rCX,Iv: loads an operand-sized immediate into rCX; REX.B retargets
 * to r9.
 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
5916
5917
/**
 * @opcode 0xba
 *
 * MOV rDX,Iv: loads an operand-sized immediate into rDX; REX.B retargets
 * to r10.
 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
5926
5927
/**
 * @opcode 0xbb
 *
 * MOV rBX,Iv: loads an operand-sized immediate into rBX; REX.B retargets
 * to r11.
 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
5936
5937
/**
 * @opcode 0xbc
 *
 * MOV rSP,Iv: loads an operand-sized immediate into rSP; REX.B retargets
 * to r12.
 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
5946
5947
/**
 * @opcode 0xbd
 *
 * MOV rBP,Iv: loads an operand-sized immediate into rBP; REX.B retargets
 * to r13.
 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
5956
5957
/**
 * @opcode 0xbe
 *
 * MOV rSI,Iv: loads an operand-sized immediate into rSI; REX.B retargets
 * to r14.
 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
5966
5967
/**
 * @opcode 0xbf
 *
 * MOV rDI,Iv: loads an operand-sized immediate into rDI; REX.B retargets
 * to r15.
 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
5976
5977
/**
 * @opcode 0xc0
 *
 * Group 2 with an 8-bit r/m operand and an immediate shift count:
 * ROL/ROR/RCL/RCR/SHL/SHR/SAR Eb,Ib selected by ModRM.reg; /6 is
 * undefined and raises \#UD.  Requires a 186 (the Ib-count forms did not
 * exist on the 8086).  OF and AF are left undefined by the verifier hint.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,            0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags,           2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory - the shift count byte follows the ModRM displacement,
           so it is fetched after calculating the effective address. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6039
6040
/**
 * @opcode 0xc1
 *
 * Group 2 with an operand-sized r/m operand and an immediate shift count:
 * ROL/ROR/RCL/RCR/SHL/SHR/SAR Ev,Ib selected by ModRM.reg; /6 is
 * undefined and raises \#UD.  Requires a 186.  OF and AF are left
 * undefined by the verifier hint.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper register half */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory - the shift count byte follows the ModRM displacement,
           so it is fetched after calculating the effective address. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6182
6183
6184/**
6185 * @opcode 0xc2
6186 */
6187FNIEMOP_DEF(iemOp_retn_Iw)
6188{
6189 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
6190 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6192 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6193 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
6194}
6195
6196
6197/**
6198 * @opcode 0xc3
6199 */
6200FNIEMOP_DEF(iemOp_retn)
6201{
6202 IEMOP_MNEMONIC(retn, "retn");
6203 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6205 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
6206}
6207
6208
6209/**
6210 * @opcode 0xc4
6211 */
6212FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
6213{
6214 /* The LDS instruction is invalid 64-bit mode. In legacy and
6215 compatability mode it is invalid with MOD=3.
6216 The use as a VEX prefix is made possible by assigning the inverted
6217 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
6218 outside of 64-bit mode. VEX is not available in real or v86 mode. */
6219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6220 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
6221 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT) )
6222 {
6223 IEMOP_MNEMONIC(vex3_prefix, "vex3");
6224 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6225 {
6226 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
6227 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
6228 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
6229 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6230 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
6231 if ((bVex2 & 0x80 /* VEX.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
6232 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6233 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6234 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
6235 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
6236 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
6237 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
6238 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
6239
6240 switch (bRm & 0x1f)
6241 {
6242 case 1: /* 0x0f lead opcode byte. */
6243#ifdef IEM_WITH_VEX
6244 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6245#else
6246 IEMOP_BITCH_ABOUT_STUB();
6247 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6248#endif
6249
6250 case 2: /* 0x0f 0x38 lead opcode bytes. */
6251#ifdef IEM_WITH_VEX
6252 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6253#else
6254 IEMOP_BITCH_ABOUT_STUB();
6255 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6256#endif
6257
6258 case 3: /* 0x0f 0x3a lead opcode bytes. */
6259#ifdef IEM_WITH_VEX
6260 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6261#else
6262 IEMOP_BITCH_ABOUT_STUB();
6263 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6264#endif
6265
6266 default:
6267 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6268 return IEMOP_RAISE_INVALID_OPCODE();
6269 }
6270 }
6271 Log(("VEX3: AVX support disabled!\n"));
6272 return IEMOP_RAISE_INVALID_OPCODE();
6273 }
6274
6275 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
6276 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
6277}
6278
6279
6280/**
6281 * @opcode 0xc5
6282 */
6283FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
6284{
6285 /* The LES instruction is invalid 64-bit mode. In legacy and
6286 compatability mode it is invalid with MOD=3.
6287 The use as a VEX prefix is made possible by assigning the inverted
6288 REX.R to the top MOD bit, and the top bit in the inverted register
6289 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
6290 to accessing registers 0..7 in this VEX form. */
6291 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6292 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
6293 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6294 {
6295 IEMOP_MNEMONIC(vex2_prefix, "vex2");
6296 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6297 {
6298 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
6299 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
6300 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6301 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
6302 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6303 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
6304 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
6305 pVCpu->iem.s.idxPrefix = bRm & 0x3;
6306
6307#ifdef IEM_WITH_VEX
6308 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6309#else
6310 IEMOP_BITCH_ABOUT_STUB();
6311 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6312#endif
6313 }
6314
6315 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
6316 Log(("VEX2: AVX support disabled!\n"));
6317 return IEMOP_RAISE_INVALID_OPCODE();
6318 }
6319
6320 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
6321 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
6322}
6323
6324
6325/**
6326 * @opcode 0xc6
6327 */
6328FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
6329{
6330 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6331 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
6332 return IEMOP_RAISE_INVALID_OPCODE();
6333 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
6334
6335 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6336 {
6337 /* register access */
6338 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6340 IEM_MC_BEGIN(0, 0);
6341 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
6342 IEM_MC_ADVANCE_RIP();
6343 IEM_MC_END();
6344 }
6345 else
6346 {
6347 /* memory access. */
6348 IEM_MC_BEGIN(0, 1);
6349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6351 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6353 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
6354 IEM_MC_ADVANCE_RIP();
6355 IEM_MC_END();
6356 }
6357 return VINF_SUCCESS;
6358}
6359
6360
6361/**
6362 * @opcode 0xc7
6363 */
6364FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
6365{
6366 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6367 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
6368 return IEMOP_RAISE_INVALID_OPCODE();
6369 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
6370
6371 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6372 {
6373 /* register access */
6374 switch (pVCpu->iem.s.enmEffOpSize)
6375 {
6376 case IEMMODE_16BIT:
6377 IEM_MC_BEGIN(0, 0);
6378 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6380 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
6381 IEM_MC_ADVANCE_RIP();
6382 IEM_MC_END();
6383 return VINF_SUCCESS;
6384
6385 case IEMMODE_32BIT:
6386 IEM_MC_BEGIN(0, 0);
6387 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6389 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
6390 IEM_MC_ADVANCE_RIP();
6391 IEM_MC_END();
6392 return VINF_SUCCESS;
6393
6394 case IEMMODE_64BIT:
6395 IEM_MC_BEGIN(0, 0);
6396 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6398 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
6399 IEM_MC_ADVANCE_RIP();
6400 IEM_MC_END();
6401 return VINF_SUCCESS;
6402
6403 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6404 }
6405 }
6406 else
6407 {
6408 /* memory access. */
6409 switch (pVCpu->iem.s.enmEffOpSize)
6410 {
6411 case IEMMODE_16BIT:
6412 IEM_MC_BEGIN(0, 1);
6413 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6414 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
6415 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6417 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
6418 IEM_MC_ADVANCE_RIP();
6419 IEM_MC_END();
6420 return VINF_SUCCESS;
6421
6422 case IEMMODE_32BIT:
6423 IEM_MC_BEGIN(0, 1);
6424 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
6426 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6428 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
6429 IEM_MC_ADVANCE_RIP();
6430 IEM_MC_END();
6431 return VINF_SUCCESS;
6432
6433 case IEMMODE_64BIT:
6434 IEM_MC_BEGIN(0, 1);
6435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
6437 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6439 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
6440 IEM_MC_ADVANCE_RIP();
6441 IEM_MC_END();
6442 return VINF_SUCCESS;
6443
6444 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6445 }
6446 }
6447}
6448
6449
6450
6451
6452/**
6453 * @opcode 0xc8
6454 */
6455FNIEMOP_DEF(iemOp_enter_Iw_Ib)
6456{
6457 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
6458 IEMOP_HLP_MIN_186();
6459 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6460 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
6461 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
6462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6463 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
6464}
6465
6466
6467/**
6468 * @opcode 0xc9
6469 */
6470FNIEMOP_DEF(iemOp_leave)
6471{
6472 IEMOP_MNEMONIC(leave, "leave");
6473 IEMOP_HLP_MIN_186();
6474 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6476 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
6477}
6478
6479
6480/**
6481 * @opcode 0xca
6482 */
6483FNIEMOP_DEF(iemOp_retf_Iw)
6484{
6485 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
6486 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6488 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6489 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
6490}
6491
6492
6493/**
6494 * @opcode 0xcb
6495 */
6496FNIEMOP_DEF(iemOp_retf)
6497{
6498 IEMOP_MNEMONIC(retf, "retf");
6499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6500 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6501 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
6502}
6503
6504
6505/**
6506 * @opcode 0xcc
6507 */
6508FNIEMOP_DEF(iemOp_int3)
6509{
6510 IEMOP_MNEMONIC(int3, "int3");
6511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6512 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
6513}
6514
6515
6516/**
6517 * @opcode 0xcd
6518 */
6519FNIEMOP_DEF(iemOp_int_Ib)
6520{
6521 IEMOP_MNEMONIC(int_Ib, "int Ib");
6522 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
6523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6524 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, IEMINT_INTN);
6525}
6526
6527
6528/**
6529 * @opcode 0xce
6530 */
6531FNIEMOP_DEF(iemOp_into)
6532{
6533 IEMOP_MNEMONIC(into, "into");
6534 IEMOP_HLP_NO_64BIT();
6535
6536 IEM_MC_BEGIN(2, 0);
6537 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
6538 IEM_MC_ARG_CONST(IEMINT, enmInt, /*=*/ IEMINT_INTO, 1);
6539 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, enmInt);
6540 IEM_MC_END();
6541 return VINF_SUCCESS;
6542}
6543
6544
6545/**
6546 * @opcode 0xcf
6547 */
6548FNIEMOP_DEF(iemOp_iret)
6549{
6550 IEMOP_MNEMONIC(iret, "iret");
6551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6552 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
6553}
6554
6555
6556/**
6557 * @opcode 0xd0
6558 */
6559FNIEMOP_DEF(iemOp_Grp2_Eb_1)
6560{
6561 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6562 PCIEMOPSHIFTSIZES pImpl;
6563 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6564 {
6565 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
6566 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
6567 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
6568 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
6569 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
6570 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
6571 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
6572 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6573 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
6574 }
6575 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6576
6577 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6578 {
6579 /* register */
6580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6581 IEM_MC_BEGIN(3, 0);
6582 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6583 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
6584 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6585 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6586 IEM_MC_REF_EFLAGS(pEFlags);
6587 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6588 IEM_MC_ADVANCE_RIP();
6589 IEM_MC_END();
6590 }
6591 else
6592 {
6593 /* memory */
6594 IEM_MC_BEGIN(3, 2);
6595 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6596 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
6597 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6598 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6599
6600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6602 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6603 IEM_MC_FETCH_EFLAGS(EFlags);
6604 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6605
6606 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6607 IEM_MC_COMMIT_EFLAGS(EFlags);
6608 IEM_MC_ADVANCE_RIP();
6609 IEM_MC_END();
6610 }
6611 return VINF_SUCCESS;
6612}
6613
6614
6615
6616/**
6617 * @opcode 0xd1
6618 */
6619FNIEMOP_DEF(iemOp_Grp2_Ev_1)
6620{
6621 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6622 PCIEMOPSHIFTSIZES pImpl;
6623 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6624 {
6625 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
6626 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
6627 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
6628 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
6629 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
6630 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
6631 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
6632 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6633 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
6634 }
6635 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6636
6637 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6638 {
6639 /* register */
6640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6641 switch (pVCpu->iem.s.enmEffOpSize)
6642 {
6643 case IEMMODE_16BIT:
6644 IEM_MC_BEGIN(3, 0);
6645 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6646 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6647 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6648 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6649 IEM_MC_REF_EFLAGS(pEFlags);
6650 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6651 IEM_MC_ADVANCE_RIP();
6652 IEM_MC_END();
6653 return VINF_SUCCESS;
6654
6655 case IEMMODE_32BIT:
6656 IEM_MC_BEGIN(3, 0);
6657 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6658 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6659 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6660 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6661 IEM_MC_REF_EFLAGS(pEFlags);
6662 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6663 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6664 IEM_MC_ADVANCE_RIP();
6665 IEM_MC_END();
6666 return VINF_SUCCESS;
6667
6668 case IEMMODE_64BIT:
6669 IEM_MC_BEGIN(3, 0);
6670 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6671 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6672 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6673 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6674 IEM_MC_REF_EFLAGS(pEFlags);
6675 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6676 IEM_MC_ADVANCE_RIP();
6677 IEM_MC_END();
6678 return VINF_SUCCESS;
6679
6680 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6681 }
6682 }
6683 else
6684 {
6685 /* memory */
6686 switch (pVCpu->iem.s.enmEffOpSize)
6687 {
6688 case IEMMODE_16BIT:
6689 IEM_MC_BEGIN(3, 2);
6690 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6691 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6692 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6694
6695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6697 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6698 IEM_MC_FETCH_EFLAGS(EFlags);
6699 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6700
6701 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6702 IEM_MC_COMMIT_EFLAGS(EFlags);
6703 IEM_MC_ADVANCE_RIP();
6704 IEM_MC_END();
6705 return VINF_SUCCESS;
6706
6707 case IEMMODE_32BIT:
6708 IEM_MC_BEGIN(3, 2);
6709 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6710 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6711 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6713
6714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6716 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6717 IEM_MC_FETCH_EFLAGS(EFlags);
6718 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6719
6720 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6721 IEM_MC_COMMIT_EFLAGS(EFlags);
6722 IEM_MC_ADVANCE_RIP();
6723 IEM_MC_END();
6724 return VINF_SUCCESS;
6725
6726 case IEMMODE_64BIT:
6727 IEM_MC_BEGIN(3, 2);
6728 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6729 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6730 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6732
6733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6735 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6736 IEM_MC_FETCH_EFLAGS(EFlags);
6737 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6738
6739 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6740 IEM_MC_COMMIT_EFLAGS(EFlags);
6741 IEM_MC_ADVANCE_RIP();
6742 IEM_MC_END();
6743 return VINF_SUCCESS;
6744
6745 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6746 }
6747 }
6748}
6749
6750
6751/**
6752 * @opcode 0xd2
6753 */
6754FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
6755{
6756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6757 PCIEMOPSHIFTSIZES pImpl;
6758 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6759 {
6760 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
6761 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
6762 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
6763 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
6764 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
6765 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
6766 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
6767 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6768 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
6769 }
6770 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6771
6772 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6773 {
6774 /* register */
6775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6776 IEM_MC_BEGIN(3, 0);
6777 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6778 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6779 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6780 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6781 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6782 IEM_MC_REF_EFLAGS(pEFlags);
6783 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6784 IEM_MC_ADVANCE_RIP();
6785 IEM_MC_END();
6786 }
6787 else
6788 {
6789 /* memory */
6790 IEM_MC_BEGIN(3, 2);
6791 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6792 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6793 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6795
6796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6798 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6799 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6800 IEM_MC_FETCH_EFLAGS(EFlags);
6801 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6802
6803 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6804 IEM_MC_COMMIT_EFLAGS(EFlags);
6805 IEM_MC_ADVANCE_RIP();
6806 IEM_MC_END();
6807 }
6808 return VINF_SUCCESS;
6809}
6810
6811
6812/**
6813 * @opcode 0xd3
6814 */
6815FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
6816{
6817 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6818 PCIEMOPSHIFTSIZES pImpl;
6819 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6820 {
6821 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
6822 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
6823 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
6824 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
6825 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
6826 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
6827 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
6828 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6829 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6830 }
6831 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6832
6833 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6834 {
6835 /* register */
6836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6837 switch (pVCpu->iem.s.enmEffOpSize)
6838 {
6839 case IEMMODE_16BIT:
6840 IEM_MC_BEGIN(3, 0);
6841 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6842 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6843 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6844 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6845 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6846 IEM_MC_REF_EFLAGS(pEFlags);
6847 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6848 IEM_MC_ADVANCE_RIP();
6849 IEM_MC_END();
6850 return VINF_SUCCESS;
6851
6852 case IEMMODE_32BIT:
6853 IEM_MC_BEGIN(3, 0);
6854 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6855 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6856 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6857 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6858 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6859 IEM_MC_REF_EFLAGS(pEFlags);
6860 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6861 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6862 IEM_MC_ADVANCE_RIP();
6863 IEM_MC_END();
6864 return VINF_SUCCESS;
6865
6866 case IEMMODE_64BIT:
6867 IEM_MC_BEGIN(3, 0);
6868 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6869 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6870 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6871 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6872 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6873 IEM_MC_REF_EFLAGS(pEFlags);
6874 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6875 IEM_MC_ADVANCE_RIP();
6876 IEM_MC_END();
6877 return VINF_SUCCESS;
6878
6879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6880 }
6881 }
6882 else
6883 {
6884 /* memory */
6885 switch (pVCpu->iem.s.enmEffOpSize)
6886 {
6887 case IEMMODE_16BIT:
6888 IEM_MC_BEGIN(3, 2);
6889 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6890 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6891 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6893
6894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6896 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6897 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6898 IEM_MC_FETCH_EFLAGS(EFlags);
6899 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6900
6901 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6902 IEM_MC_COMMIT_EFLAGS(EFlags);
6903 IEM_MC_ADVANCE_RIP();
6904 IEM_MC_END();
6905 return VINF_SUCCESS;
6906
6907 case IEMMODE_32BIT:
6908 IEM_MC_BEGIN(3, 2);
6909 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6910 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6911 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6912 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6913
6914 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6916 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6917 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6918 IEM_MC_FETCH_EFLAGS(EFlags);
6919 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6920
6921 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6922 IEM_MC_COMMIT_EFLAGS(EFlags);
6923 IEM_MC_ADVANCE_RIP();
6924 IEM_MC_END();
6925 return VINF_SUCCESS;
6926
6927 case IEMMODE_64BIT:
6928 IEM_MC_BEGIN(3, 2);
6929 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6930 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6931 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6932 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6933
6934 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6936 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6937 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6938 IEM_MC_FETCH_EFLAGS(EFlags);
6939 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6940
6941 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6942 IEM_MC_COMMIT_EFLAGS(EFlags);
6943 IEM_MC_ADVANCE_RIP();
6944 IEM_MC_END();
6945 return VINF_SUCCESS;
6946
6947 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6948 }
6949 }
6950}
6951
6952/**
6953 * @opcode 0xd4
6954 */
6955FNIEMOP_DEF(iemOp_aam_Ib)
6956{
6957 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
6958 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6960 IEMOP_HLP_NO_64BIT();
6961 if (!bImm)
6962 return IEMOP_RAISE_DIVIDE_ERROR();
6963 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
6964}
6965
6966
6967/**
6968 * @opcode 0xd5
6969 */
6970FNIEMOP_DEF(iemOp_aad_Ib)
6971{
6972 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
6973 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6975 IEMOP_HLP_NO_64BIT();
6976 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
6977}
6978
6979
6980/**
6981 * @opcode 0xd6
6982 */
6983FNIEMOP_DEF(iemOp_salc)
6984{
6985 IEMOP_MNEMONIC(salc, "salc");
6986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6987 IEMOP_HLP_NO_64BIT();
6988
6989 IEM_MC_BEGIN(0, 0);
6990 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6991 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
6992 } IEM_MC_ELSE() {
6993 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
6994 } IEM_MC_ENDIF();
6995 IEM_MC_ADVANCE_RIP();
6996 IEM_MC_END();
6997 return VINF_SUCCESS;
6998}
6999
7000
7001/**
7002 * @opcode 0xd7
7003 */
7004FNIEMOP_DEF(iemOp_xlat)
7005{
7006 IEMOP_MNEMONIC(xlat, "xlat");
7007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7008 switch (pVCpu->iem.s.enmEffAddrMode)
7009 {
7010 case IEMMODE_16BIT:
7011 IEM_MC_BEGIN(2, 0);
7012 IEM_MC_LOCAL(uint8_t, u8Tmp);
7013 IEM_MC_LOCAL(uint16_t, u16Addr);
7014 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
7015 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
7016 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
7017 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7018 IEM_MC_ADVANCE_RIP();
7019 IEM_MC_END();
7020 return VINF_SUCCESS;
7021
7022 case IEMMODE_32BIT:
7023 IEM_MC_BEGIN(2, 0);
7024 IEM_MC_LOCAL(uint8_t, u8Tmp);
7025 IEM_MC_LOCAL(uint32_t, u32Addr);
7026 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
7027 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
7028 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
7029 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7030 IEM_MC_ADVANCE_RIP();
7031 IEM_MC_END();
7032 return VINF_SUCCESS;
7033
7034 case IEMMODE_64BIT:
7035 IEM_MC_BEGIN(2, 0);
7036 IEM_MC_LOCAL(uint8_t, u8Tmp);
7037 IEM_MC_LOCAL(uint64_t, u64Addr);
7038 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
7039 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
7040 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
7041 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7042 IEM_MC_ADVANCE_RIP();
7043 IEM_MC_END();
7044 return VINF_SUCCESS;
7045
7046 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7047 }
7048}
7049
7050
7051/**
7052 * Common worker for FPU instructions working on ST0 and STn, and storing the
7053 * result in ST0.
7054 *
7055 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7056 */
7057FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7058{
7059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7060
7061 IEM_MC_BEGIN(3, 1);
7062 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7063 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7064 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7065 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7066
7067 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7068 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7069 IEM_MC_PREPARE_FPU_USAGE();
7070 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
7071 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7072 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7073 IEM_MC_ELSE()
7074 IEM_MC_FPU_STACK_UNDERFLOW(0);
7075 IEM_MC_ENDIF();
7076 IEM_MC_ADVANCE_RIP();
7077
7078 IEM_MC_END();
7079 return VINF_SUCCESS;
7080}
7081
7082
7083/**
7084 * Common worker for FPU instructions working on ST0 and STn, and only affecting
7085 * flags.
7086 *
7087 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7088 */
7089FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
7090{
7091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7092
7093 IEM_MC_BEGIN(3, 1);
7094 IEM_MC_LOCAL(uint16_t, u16Fsw);
7095 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7096 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7097 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7098
7099 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7100 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7101 IEM_MC_PREPARE_FPU_USAGE();
7102 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
7103 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
7104 IEM_MC_UPDATE_FSW(u16Fsw);
7105 IEM_MC_ELSE()
7106 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
7107 IEM_MC_ENDIF();
7108 IEM_MC_ADVANCE_RIP();
7109
7110 IEM_MC_END();
7111 return VINF_SUCCESS;
7112}
7113
7114
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping the stack when done.
 *
 * @param   bRm         The ModR/M byte; the low three bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        /* No destination register (UINT8_MAX); still pops on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7145
7146
/** Opcode 0xd8 11/0.
 * FADD ST0,STn: add STn to ST0, result in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
7153
7154
/** Opcode 0xd8 11/1.
 * FMUL ST0,STn: multiply ST0 by STn, result in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
7161
7162
/** Opcode 0xd8 11/2.
 * FCOM ST0,STn: compare ST0 with STn, updating only the FSW flags. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
7169
7170
/** Opcode 0xd8 11/3.
 * FCOMP ST0,STn: like FCOM but pops the stack afterwards (same assembly
 * worker as FCOM, pop handled by the _pop helper). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
7177
7178
/** Opcode 0xd8 11/4.
 * FSUB ST0,STn: subtract STn from ST0, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
7185
7186
/** Opcode 0xd8 11/5.
 * FSUBR ST0,STn: reversed subtract (STn - ST0), result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
7193
7194
/** Opcode 0xd8 11/6.
 * FDIV ST0,STn: divide ST0 by STn, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
7201
7202
/** Opcode 0xd8 11/7.
 * FDIVR ST0,STn: reversed divide (STn / ST0), result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
7209
7210
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte; used to compute the effective address.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    /* Address decoding must complete before DONE_DECODING. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* The memory operand is fetched before checking the register stack. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7246
7247
/** Opcode 0xd8 !11/0.
 * FADD ST0,m32real: add a 32-bit real from memory to ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
7254
7255
/** Opcode 0xd8 !11/1.
 * FMUL ST0,m32real: multiply ST0 by a 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
7262
7263
/** Opcode 0xd8 !11/2.
 * FCOM ST0,m32real: compare ST0 with a 32-bit real from memory; only
 * the FSW flags are updated (FPUDP/FPUDS recorded via the _MEM_OP forms). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7296
7297
/** Opcode 0xd8 !11/3.
 * FCOMP ST0,m32real: like FCOM m32r but pops the stack afterwards
 * (uses the _THEN_POP FSW/underflow forms). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7330
7331
/** Opcode 0xd8 !11/4.
 * FSUB ST0,m32real: subtract a 32-bit real from ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
7338
7339
/** Opcode 0xd8 !11/5.
 * FSUBR ST0,m32real: reversed subtract (m32r - ST0), result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
7346
7347
/** Opcode 0xd8 !11/6.
 * FDIV ST0,m32real: divide ST0 by a 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
7354
7355
/** Opcode 0xd8 !11/7.
 * FDIVR ST0,m32real: reversed divide (m32r / ST0), result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7362
7363
/**
 * @opcode 0xd8
 *
 * Escape opcode 0xd8 decoder: dispatches on the ModR/M byte.  Mod=3 selects
 * the register (ST0,STn) forms, anything else the m32real memory forms.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of 0xd8 + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7403
7404
/** Opcode 0xd9 /0 mem32real
 * FLD m32real: push a 32-bit real from memory onto the FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 (relative) must be free for the push, otherwise stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7437
7438
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real: store ST0 to memory as a 32-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults precede
       any FPU state changes. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: if invalid-op exceptions are masked (FCW.IM), a
           negative QNaN is stored; either way the underflow is signalled. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7473
7474
/** Opcode 0xd9 !11/3
 * FSTP m32real: store ST0 to memory as a 32-bit real and pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults precede
       any FPU state changes. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: store negative QNaN if FCW.IM is set, then signal
           underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7509
7510
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte: load the FPU environment from memory; the 14 vs 28
 * byte layout is selected by the effective operand size (passed to the
 * C implementation). */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7528
7529
7530/** Opcode 0xd9 !11/5 */
7531FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7532{
7533 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7534 IEM_MC_BEGIN(1, 1);
7535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7536 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7539 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7540 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7541 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7542 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7543 IEM_MC_END();
7544 return VINF_SUCCESS;
7545}
7546
7547
7548/** Opcode 0xd9 !11/6 */
7549FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
7550{
7551 IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
7552 IEM_MC_BEGIN(3, 0);
7553 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
7554 IEM_MC_ARG(uint8_t, iEffSeg, 1);
7555 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
7556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7558 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7559 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7560 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7561 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
7562 IEM_MC_END();
7563 return VINF_SUCCESS;
7564}
7565
7566
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte: store the FPU control word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7584
7585
/** Opcode 0xd9 0xd0.
 * FNOP: no operation apart from the usual FPU availability checks and
 * FOP/FPUIP bookkeeping.  (iemOp_EscF1 only dispatches here for bRm == 0xd0;
 * 0xd9 0xd8-0xdf goes to iemOp_fstp_stN.) */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7603
7604
/** Opcode 0xd9 11/0 stN
 * FLD STn: push a copy of STn onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        /* Wrap the source value in a result with a zero FSW and push it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7632
7633
/** Opcode 0xd9 11/3 stN
 * FXCH STn: exchange the contents of ST0 and STn. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Old STn value goes to ST0 (with C1 cleared via the result FSW),
           old ST0 value goes to STn. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* Underflow handling is complex enough to warrant a C implementation. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7664
7665
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP STn: copy ST0 into STn and pop the register stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: no copy needed, just pop (or signal
           underflow-and-pop if ST0 is empty). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7712
7713
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* Empty ST0: stack underflow, result register is ST0. */
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7743
7744
/** Opcode 0xd9 0xe0.
 * FCHS: change the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
7751
7752
/** Opcode 0xd9 0xe1.
 * FABS: replace ST0 with its absolute value. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7759
7760
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW
 * (no register result is stored).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* No destination register for the underflow (UINT8_MAX). */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7789
7790
/** Opcode 0xd9 0xe4.
 * FTST: compare ST0 against 0.0, setting only the FSW condition flags. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
7797
7798
/** Opcode 0xd9 0xe5.
 * FXAM: classify the value in ST0 via the FSW condition flags. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
7805
7806
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 (relative) must be free for the push, otherwise stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7834
7835
/** Opcode 0xd9 0xe8.
 * FLD1: push +1.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
7842
7843
/** Opcode 0xd9 0xe9.
 * FLDL2T: push log2(10) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
7850
7851
/** Opcode 0xd9 0xea.
 * FLDL2E: push log2(e) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
7858
/** Opcode 0xd9 0xeb.
 * FLDPI: push pi onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
7865
7866
/** Opcode 0xd9 0xec.
 * FLDLG2: push log10(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
7873
/** Opcode 0xd9 0xed.
 * FLDLN2: push loge(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
7880
7881
/** Opcode 0xd9 0xee.
 * FLDZ: push +0.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
7888
7889
/** Opcode 0xd9 0xf0.
 * F2XM1: replace ST0 with 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7896
7897
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte (or a literal); low three bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Note the operand order: STn is the first (destination) operand. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7929
7930
7931/** Opcode 0xd9 0xf1. */
7932FNIEMOP_DEF(iemOp_fyl2x)
7933{
7934 IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
7935 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
7936}
7937
7938
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO,           FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO,  pFpuResTwo, FpuResTwo,  0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value,              1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7968
7969
/** Opcode 0xd9 0xf2.
 * FPTAN: two-output worker — replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
7976
7977
/** Opcode 0xd9 0xf3.
 * FPATAN: result stored in ST1, stack popped (st1,st0 operand form). */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
7984
7985
/** Opcode 0xd9 0xf4.
 * FXTRACT: two-output worker — replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
7992
7993
/** Opcode 0xd9 0xf5.
 * FPREM1: partial remainder of ST0 / ST1, result in ST0 (no pop). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
8000
8001
/** Opcode 0xd9 0xf6.
 * FDECSTP: decrement the FPU stack top pointer (no register contents change). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8024
8025
/** Opcode 0xd9 0xf7.
 * FINCSTP: increment the FPU stack top pointer (no register contents change). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8048
8049
/** Opcode 0xd9 0xf8.
 * FPREM: partial remainder of ST0 / ST1, result in ST0 (no pop). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
8056
8057
/** Opcode 0xd9 0xf9.
 * FYL2XP1: result stored in ST1, stack popped (st1,st0 operand form). */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
8064
8065
/** Opcode 0xd9 0xfa.
 * FSQRT: replace ST0 with its square root. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
8072
8073
/** Opcode 0xd9 0xfb.
 * FSINCOS: two-output worker — replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
8080
8081
/** Opcode 0xd9 0xfc.
 * FRNDINT: round ST0 to an integer value, result in ST0. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
8088
8089
/** Opcode 0xd9 0xfd.
 * FSCALE: scale ST0 by ST1, result in ST0 (no pop). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
8096
8097
/** Opcode 0xd9 0xfe.
 * FSIN: replace ST0 with its sine. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
8104
8105
/** Opcode 0xd9 0xff.
 * FCOS: replace ST0 with its cosine. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
8112
8113
/** Used by iemOp_EscF1 for dispatching 0xd9 with a second byte in the
 * 0xe0..0xff range; index is (second byte - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
8150
8151
8152/**
8153 * @opcode 0xd9
8154 */
8155FNIEMOP_DEF(iemOp_EscF1)
8156{
8157 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8158 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
8159
8160 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8161 {
8162 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8163 {
8164 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
8165 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
8166 case 2:
8167 if (bRm == 0xd0)
8168 return FNIEMOP_CALL(iemOp_fnop);
8169 return IEMOP_RAISE_INVALID_OPCODE();
8170 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
8171 case 4:
8172 case 5:
8173 case 6:
8174 case 7:
8175 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
8176 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
8177 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8178 }
8179 }
8180 else
8181 {
8182 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8183 {
8184 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
8185 case 1: return IEMOP_RAISE_INVALID_OPCODE();
8186 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
8187 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
8188 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
8189 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
8190 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
8191 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
8192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8193 }
8194 }
8195}
8196
8197
/** Opcode 0xda 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    /* FCMOVB: copies ST(i) into ST(0) when EFLAGS.CF is set. */
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8224
8225
/** Opcode 0xda 11/1. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    /* FCMOVE: copies ST(i) into ST(0) when EFLAGS.ZF is set. */
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8252
8253
/** Opcode 0xda 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    /* FCMOVBE: copies ST(i) into ST(0) when EFLAGS.CF or EFLAGS.ZF is set. */
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8280
8281
/** Opcode 0xda 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    /* FCMOVU: copies ST(i) into ST(0) when EFLAGS.PF is set (unordered). */
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8308
8309
8310/**
8311 * Common worker for FPU instructions working on ST0 and STn, only affecting
8312 * flags, and popping twice when done.
8313 *
8314 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8315 */
8316FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8317{
8318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8319
8320 IEM_MC_BEGIN(3, 1);
8321 IEM_MC_LOCAL(uint16_t, u16Fsw);
8322 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8323 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8324 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8325
8326 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8327 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8328
8329 IEM_MC_PREPARE_FPU_USAGE();
8330 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
8331 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8332 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
8333 IEM_MC_ELSE()
8334 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
8335 IEM_MC_ENDIF();
8336 IEM_MC_ADVANCE_RIP();
8337
8338 IEM_MC_END();
8339 return VINF_SUCCESS;
8340}
8341
8342
/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    /* FUCOMPP: unordered compare of ST(0) with ST(1), then pop both. */
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8349
8350
8351/**
8352 * Common worker for FPU instructions working on ST0 and an m32i, and storing
8353 * the result in ST0.
8354 *
8355 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8356 */
8357FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
8358{
8359 IEM_MC_BEGIN(3, 3);
8360 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8361 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8362 IEM_MC_LOCAL(int32_t, i32Val2);
8363 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8364 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8365 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8366
8367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8369
8370 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8371 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8372 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8373
8374 IEM_MC_PREPARE_FPU_USAGE();
8375 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
8376 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
8377 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8378 IEM_MC_ELSE()
8379 IEM_MC_FPU_STACK_UNDERFLOW(0);
8380 IEM_MC_ENDIF();
8381 IEM_MC_ADVANCE_RIP();
8382
8383 IEM_MC_END();
8384 return VINF_SUCCESS;
8385}
8386
8387
/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    /* FIADD m32int: ST(0) += (32-bit integer memory operand). */
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
8394
8395
/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    /* FIMUL m32int: ST(0) *= (32-bit integer memory operand). */
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8402
8403
/** Opcode 0xda !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    /* FICOM m32int: compare ST(0) with a 32-bit integer; only FSW is updated. */
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8436
8437
/** Opcode 0xda !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    /* FICOMP m32int: like FICOM but pops ST(0) afterwards. */
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8470
8471
/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    /* FISUB m32int: ST(0) -= (32-bit integer memory operand). */
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
8478
8479
/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    /* FISUBR m32int: ST(0) = (32-bit integer memory operand) - ST(0). */
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
8486
8487
/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    /* FIDIV m32int: ST(0) /= (32-bit integer memory operand). */
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
8494
8495
/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    /* FIDIVR m32int: ST(0) = (32-bit integer memory operand) / ST(0). */
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8502
8503
8504/**
8505 * @opcode 0xda
8506 */
8507FNIEMOP_DEF(iemOp_EscF2)
8508{
8509 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8510 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
8511 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8512 {
8513 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8514 {
8515 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
8516 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
8517 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
8518 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
8519 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8520 case 5:
8521 if (bRm == 0xe9)
8522 return FNIEMOP_CALL(iemOp_fucompp);
8523 return IEMOP_RAISE_INVALID_OPCODE();
8524 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8525 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8526 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8527 }
8528 }
8529 else
8530 {
8531 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8532 {
8533 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
8534 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
8535 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
8536 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
8537 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
8538 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
8539 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
8540 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
8541 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8542 }
8543 }
8544}
8545
8546
/** Opcode 0xdb !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    /* FILD m32int: converts a 32-bit integer to r80 and pushes it onto the stack. */
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST(7) relative to the current top; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8578
8579
/** Opcode 0xdb !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    /* FISTTP m32int: store ST(0) as a 32-bit integer with truncation, then pop. */
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable before doing FPU work so #PF happens up front. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty register: if the invalid-op exception is masked, store the
           integer indefinite value; either way report stack underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8614
8615
/** Opcode 0xdb !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    /* FIST m32int: store ST(0) as a 32-bit integer (rounded per FCW); no pop. */
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable before doing FPU work so #PF happens up front. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty register: store integer indefinite if #IA is masked, flag underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8650
8651
/** Opcode 0xdb !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    /* FISTP m32int: like FIST but pops ST(0) afterwards. */
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable before doing FPU work so #PF happens up front. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty register: store integer indefinite if #IA is masked, flag underflow, pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8686
8687
/** Opcode 0xdb !11/5. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    /* FLD m80real: pushes an 80-bit real memory operand onto the FPU stack. */
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST(7) relative to the current top; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8719
8720
/** Opcode 0xdb !11/7. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    /* FSTP m80real: stores ST(0) to an 80-bit real memory operand, then pops. */
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable before doing FPU work so #PF happens up front. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty register: store negative QNaN if #IA is masked; flag underflow, pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8755
8756
/** Opcode 0xdb 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    /* FCMOVNB: copies ST(i) into ST(0) when EFLAGS.CF is clear. */
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8783
8784
/** Opcode 0xdb 11/1. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    /* FCMOVNE: copies ST(i) into ST(0) when EFLAGS.ZF is clear. */
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8811
8812
/** Opcode 0xdb 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    /* FCMOVNBE: copies ST(i) into ST(0) when both EFLAGS.CF and EFLAGS.ZF are clear. */
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8839
8840
8841/** Opcode 0xdb 11/3. */
8842FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
8843{
8844 IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
8845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8846
8847 IEM_MC_BEGIN(0, 1);
8848 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8849
8850 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8851 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8852
8853 IEM_MC_PREPARE_FPU_USAGE();
8854 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
8855 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
8856 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8857 IEM_MC_ENDIF();
8858 IEM_MC_UPDATE_FPU_OPCODE_IP();
8859 IEM_MC_ELSE()
8860 IEM_MC_FPU_STACK_UNDERFLOW(0);
8861 IEM_MC_ENDIF();
8862 IEM_MC_ADVANCE_RIP();
8863
8864 IEM_MC_END();
8865 return VINF_SUCCESS;
8866}
8867
8868
/** Opcode 0xdb 0xe0. */
FNIEMOP_DEF(iemOp_fneni)
{
    /* FNENI: 8087 interrupt-enable instruction; a no-op on later FPUs (only #NM checked). */
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8880
8881
/** Opcode 0xdb 0xe1. */
FNIEMOP_DEF(iemOp_fndisi)
{
    /* FNDISI: 8087 interrupt-disable instruction; a no-op on later FPUs (only #NM checked). */
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8893
8894
/** Opcode 0xdb 0xe2. */
FNIEMOP_DEF(iemOp_fnclex)
{
    /* FNCLEX: clears the FPU exception flags in FSW without checking for pending exceptions. */
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8909
8910
/** Opcode 0xdb 0xe3. */
FNIEMOP_DEF(iemOp_fninit)
{
    /* FNINIT: reinitializes the FPU without checking for pending exceptions first. */
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8918
8919
/** Opcode 0xdb 0xe4. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    /* FNSETPM: 80287 "set protected mode"; a no-op on later FPUs (only #NM checked). */
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8931
8932
/** Opcode 0xdb 0xe5. */
FNIEMOP_DEF(iemOp_frstpm)
{
    /* FRSTPM: 80287XL "reset protected mode"; raises #UD on newer CPUs (the
       no-op variant is kept but disabled below). */
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8948
8949
/** Opcode 0xdb 11/5. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    /* FUCOMI: unordered compare of ST(0) with ST(i), result in EFLAGS; no pop. */
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
8956
8957
/** Opcode 0xdb 11/6. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    /* FCOMI: ordered compare of ST(0) with ST(i), result in EFLAGS; no pop. */
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8964
8965
8966/**
8967 * @opcode 0xdb
8968 */
8969FNIEMOP_DEF(iemOp_EscF3)
8970{
8971 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8972 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
8973 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8974 {
8975 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8976 {
8977 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
8978 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
8979 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
8980 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
8981 case 4:
8982 switch (bRm)
8983 {
8984 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
8985 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
8986 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
8987 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
8988 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
8989 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
8990 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
8991 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
8992 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8993 }
8994 break;
8995 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
8996 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
8997 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8998 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8999 }
9000 }
9001 else
9002 {
9003 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9004 {
9005 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
9006 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
9007 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
9008 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
9009 case 4: return IEMOP_RAISE_INVALID_OPCODE();
9010 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
9011 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9012 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
9013 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9014 }
9015 }
9016}
9017
9018
9019/**
9020 * Common worker for FPU instructions working on STn and ST0, and storing the
9021 * result in STn unless IE, DE or ZE was raised.
9022 *
9023 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9024 */
9025FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9026{
9027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9028
9029 IEM_MC_BEGIN(3, 1);
9030 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9031 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9032 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9033 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9034
9035 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9036 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9037
9038 IEM_MC_PREPARE_FPU_USAGE();
9039 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
9040 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9041 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
9042 IEM_MC_ELSE()
9043 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
9044 IEM_MC_ENDIF();
9045 IEM_MC_ADVANCE_RIP();
9046
9047 IEM_MC_END();
9048 return VINF_SUCCESS;
9049}
9050
9051
/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    /* FADD ST(i),ST(0): ST(i) += ST(0). */
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
9058
9059
/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    /* FMUL ST(i),ST(0): ST(i) *= ST(0). */
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
9066
9067
/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    /* FSUBR ST(i),ST(0): ST(i) = ST(0) - ST(i). */
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
9074
9075
/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    /* FSUB ST(i),ST(0): ST(i) -= ST(0). */
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
9082
9083
/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    /* FDIVR ST(i),ST(0): ST(i) = ST(0) / ST(i). */
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
9090
9091
/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    /* FDIV ST(i),ST(0): ST(i) /= ST(0). */
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
9098
9099
9100/**
9101 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
9102 * memory operand, and storing the result in ST0.
9103 *
9104 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9105 */
9106FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
9107{
9108 IEM_MC_BEGIN(3, 3);
9109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9110 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9111 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
9112 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9113 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
9114 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
9115
9116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9118 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9119 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9120
9121 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9122 IEM_MC_PREPARE_FPU_USAGE();
9123 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
9124 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
9125 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9126 IEM_MC_ELSE()
9127 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9128 IEM_MC_ENDIF();
9129 IEM_MC_ADVANCE_RIP();
9130
9131 IEM_MC_END();
9132 return VINF_SUCCESS;
9133}
9134
9135
/** Opcode 0xdc !11/0. FADD m64fp: ST(0) += [m64]. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
9142
9143
/** Opcode 0xdc !11/1. FMUL m64fp: ST(0) *= [m64]. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
9150
9151
/** Opcode 0xdc !11/2. FCOM m64fp: compares ST(0) with [m64], setting C0/C2/C3. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Comparison only updates FSW; no stack register is written. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9184
9185
/** Opcode 0xdc !11/3. FCOMP m64fp: like FCOM m64fp but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same comparison worker as FCOM, but the *_THEN_POP variants pop ST(0). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9218
9219
/** Opcode 0xdc !11/4. FSUB m64fp: ST(0) -= [m64]. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
9226
9227
/** Opcode 0xdc !11/5. FSUBR m64fp: ST(0) = [m64] - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
9234
9235
/** Opcode 0xdc !11/6. FDIV m64fp: ST(0) /= [m64]. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
9242
9243
/** Opcode 0xdc !11/7. FDIVR m64fp: ST(0) = [m64] / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
9250
9251
/**
 * @opcode 0xdc
 *
 * Escape opcode 0xdc dispatcher: register forms (mod=3) operate on
 * ST(i),ST(0); memory forms operate on ST(0) and an m64 real operand,
 * selected by the ModR/M reg field.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): low 3 bits of the escape byte + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: ST(i),ST(0) variants. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: ST(0) op m64 real. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9290
9291
/** Opcode 0xdd !11/0. FLD m64fp: converts [m64] to 80-bit and pushes it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST(7) (the register below TOP) to be empty, else
       it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9323
9324
/** Opcode 0xdd !11/1. FISTTP m64int: stores ST(0) truncated to int64 and pops. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching the FPU state so memory faults
       are raised before any FPU state changes. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty source: with #IE masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9359
9360
/** Opcode 0xdd !11/2. FST m64fp: stores ST(0) as a 64-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty source: with #IE masked, store negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9395
9396
9397
9398
/** Opcode 0xdd !11/3. FSTP m64fp: stores ST(0) as a 64-bit real and pops. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty source: with #IE masked, store negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9433
9434
/** Opcode 0xdd !11/4. FRSTOR: restores the full FPU state from memory. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    /* The image size depends on the operand size (94 vs 108 bytes). */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* The heavy lifting (parsing the 94/108-byte image) is done in C. */
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9452
9453
/** Opcode 0xdd !11/6. FNSAVE: saves the full FPU state to memory.
 * NOTE(review): FNSAVE also reinitializes the FPU after saving; presumably
 * iemCImpl_fnsave handles that, which is why only *_FOR_READ is used here —
 * confirm against the CIMPL implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    /* The image size depends on the operand size (94 vs 108 bytes). */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
9472
/** Opcode 0xdd !11/7. FNSTSW m16: stores the FPU status word to memory
 * (no-wait form, hence no IEM_MC_MAYBE_RAISE_FPU_XCPT). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 *        from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 *        NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
9497
9498
/** Opcode 0xdd 11/0. FFREE ST(i): marks stack register i as empty (tag). */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9520
9521
/** Opcode 0xdd 11/2. FST ST(i): copies ST(0) into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST(0) value in a result with a zero FSW delta and store
           it into ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9546
9547
/** Opcode 0xdd 11/4. FUCOM ST(i): unordered compare of ST(0) with ST(i). */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
9554
9555
/** Opcode 0xdd 11/5. FUCOMP ST(i): unordered compare of ST(0) with ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9562
9563
/**
 * @opcode 0xdd
 *
 * Escape opcode 0xdd dispatcher: register forms are FFREE/FST/FSTP/FUCOM(P);
 * memory forms are the m64 real load/store and FPU state save/restore group.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): low 3 bits of the escape byte + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9602
9603
/** Opcode 0xde 11/0. FADDP ST(i),ST(0): ST(i) += ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
9610
9611
/** Opcode 0xde 11/1. FMULP ST(i),ST(0): ST(i) *= ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
9618
9619
/** Opcode 0xde 0xd9. FCOMPP: compares ST(0) with ST(1), then pops twice. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
9626
9627
/** Opcode 0xde 11/4. FSUBRP ST(i),ST(0): ST(i) = ST(0) - ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
9634
9635
/** Opcode 0xde 11/5. FSUBP ST(i),ST(0): ST(i) -= ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
9642
9643
/** Opcode 0xde 11/6. FDIVRP ST(i),ST(0): ST(i) = ST(0) / ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
9650
9651
/** Opcode 0xde 11/7. FDIVP ST(i),ST(0): ST(i) /= ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9658
9659
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form), used to compute the
 *                      effective address of the m16 integer operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(0) must not be empty; the result goes back into ST(0). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9695
9696
/** Opcode 0xde !11/0. FIADD m16int: ST(0) += (int16)[mem]. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
9703
9704
/** Opcode 0xde !11/1. FIMUL m16int: ST(0) *= (int16)[mem]. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9711
9712
/** Opcode 0xde !11/2. FICOM m16int: compares ST(0) with (int16)[mem]. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Comparison only updates FSW; no stack register is written. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9745
9746
/** Opcode 0xde !11/3. FICOMP m16int: like FICOM m16int but pops afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same comparison worker as FICOM, but the *_THEN_POP variants pop ST(0). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9779
9780
/** Opcode 0xde !11/4. FISUB m16int: ST(0) -= (int16)[mem]. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
9787
9788
/** Opcode 0xde !11/5. FISUBR m16int: ST(0) = (int16)[mem] - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
9795
9796
/** Opcode 0xde !11/6. FIDIV m16int: ST(0) /= (int16)[mem]. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
9803
9804
/** Opcode 0xde !11/7. FIDIVR m16int: ST(0) = (int16)[mem] / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9811
9812
/**
 * @opcode 0xde
 *
 * Escape opcode 0xde dispatcher: register forms are the pop-variant
 * arithmetic instructions (FADDP, ..., FCOMPP at DE D9); memory forms
 * operate on ST(0) and an m16 integer operand.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): low 3 bits of the escape byte + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)    /* Only DE D9 is valid in /3: FCOMPP. */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: ST(0) op m16 integer. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9853
9854
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like FFREE + FINCSTP. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Free ST(i), then increment TOP (the "pop" part of FFREEP). */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9876
9877
/** Opcode 0xdf 0xe0. FNSTSW AX: stores the FPU status word in AX
 * (no-wait form, hence no IEM_MC_MAYBE_RAISE_FPU_XCPT). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9894
9895
/** Opcode 0xdf 11/5. FUCOMIP ST(0),ST(i): unordered compare into EFLAGS, then pop.
 * NOTE(review): this uses the FCOMI worker (iemAImpl_fcomi_r80_by_r80), same
 * as FCOMIP below; FUCOMI differs from FCOMI only in #IA behavior for QNaN
 * operands — confirm whether a dedicated FUCOMI worker is needed. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9902
9903
/** Opcode 0xdf 11/6. FCOMIP ST(0),ST(i): compare into EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9910
9911
/** Opcode 0xdf !11/0. FILD m16int: converts (int16)[mem] to 80-bit and pushes it. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST(7) (the register below TOP) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9943
9944
/** Opcode 0xdf !11/1. FISTTP m16int: stores ST(0) truncated to int16 and pops. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching FPU state so memory faults come first. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty source: with #IE masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9979
9980
/** Opcode 0xdf !11/2. FIST m16int: stores ST(0) rounded to int16 (no pop). */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty source: with #IE masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10015
10016
/** Opcode 0xdf !11/3. FISTP m16int: stores ST(0) rounded to int16 and pops. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty source: with #IE masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10051
10052
/** Opcode 0xdf !11/4.
 * FBLD - not implemented yet (stub).
 * NOTE(review): the operand of FBLD is an 80-bit packed BCD value, so the
 * '_m80d' suffix looks like a misnomer; renaming would touch the dispatch
 * table, so it is only noted here. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
10055
10056
/** Opcode 0xdf !11/5.
 * FILD m64i - load a 64-bit signed integer from memory and push it onto the
 * FPU stack as an 80-bit real. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Fetch the source operand after the #NM / FPU exception checks. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push targets what will become the new top, i.e. current ST(7). */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST(7) occupied: stack overflow instead of pushing. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10088
10089
/** Opcode 0xdf !11/6.
 * FBSTP - not implemented yet (stub).
 * NOTE(review): FBSTP stores an 80-bit packed BCD value, so '_m80d' looks
 * like a misnomer (see iemOp_fbld_m80d); the name is kept because the
 * dispatch table references it. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
10092
10093
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST(0) to memory as a 64-bit signed integer and pop.
 * Same structure as iemOp_fistp_m16i, with 64-bit destination and worker. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before modifying FPU state. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* ST(0) valid: convert+store, commit, update FSW and pop the stack. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store integer indefinite if #IA is masked, then
           raise underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10128
10129
/**
 * @opcode 0xdf
 *
 * Escape opcode 0xdf dispatcher: splits on the ModR/M mod field (register
 * vs. memory forms) and then on the reg field to the individual workers.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms (mod == 3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* FNSTSW AX is the only valid /4 encoding. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms (mod != 3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10169
10170
10171/**
10172 * @opcode 0xe0
10173 */
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ Jb: decrement *CX (width per effective address size) and
 * branch if the counter is non-zero AND ZF is clear.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects whether CX, ECX or RCX is the counter. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10219
10220
10221/**
10222 * @opcode 0xe1
10223 */
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ Jb: decrement *CX (width per effective address size) and
 * branch if the counter is non-zero AND ZF is set.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects whether CX, ECX or RCX is the counter. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10269
10270
10271/**
10272 * @opcode 0xe2
10273 */
/**
 * @opcode 0xe2
 *
 * LOOP Jb: decrement *CX (width per effective address size) and branch if
 * the counter is non-zero.  A "loop to self" encoding (displacement equal to
 * minus the instruction length, i.e. a tight spin until the counter hits
 * zero) is special-cased: the counter is simply cleared and execution falls
 * through, avoiding pointless iteration.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Loop-to-self: skip the spinning, clear the counter and move on. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Loop-to-self: skip the spinning, clear the counter and move on. */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Loop-to-self: skip the spinning, clear the counter and move on. */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10346
10347
10348/**
10349 * @opcode 0xe3
10350 */
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: branch if the counter register (width per effective
 * address size) is zero.  Note the inverted structure: the non-zero case
 * advances RIP, the else branch takes the jump.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10393
10394
10395/** Opcode 0xe4 */
/** Opcode 0xe4.
 * IN AL,Ib - read one byte from the immediate I/O port into AL; deferred to
 * the C implementation (iemCImpl_in) with cbReg = 1. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}
10403
10404
10405/** Opcode 0xe5 */
/** Opcode 0xe5.
 * IN eAX,Ib - read a word/dword from the immediate I/O port into AX/EAX;
 * access width (2 or 4) follows the effective operand size. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10413
10414
10415/** Opcode 0xe6 */
/** Opcode 0xe6.
 * OUT Ib,AL - write AL to the immediate I/O port; deferred to the C
 * implementation (iemCImpl_out) with cbReg = 1. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
10423
10424
10425/** Opcode 0xe7 */
/** Opcode 0xe7.
 * OUT Ib,eAX - write AX/EAX to the immediate I/O port; access width (2 or 4)
 * follows the effective operand size. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10433
10434
10435/**
10436 * @opcode 0xe8
10437 */
/**
 * @opcode 0xe8
 *
 * CALL Jv - near relative call.  The relative displacement is decoded per
 * the effective operand size (64-bit mode uses a sign-extended 32-bit
 * immediate) and the actual push/branch work is deferred to the C
 * implementations.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* The displacement is a sign-extended 32-bit immediate in 64-bit mode. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10465
10466
10467/**
10468 * @opcode 0xe9
10469 */
/**
 * @opcode 0xe9
 *
 * JMP Jv - near relative jump.  The 64-bit case shares the 32-bit path
 * because the displacement is a (sign-extended) 32-bit immediate there too.
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10498
10499
10500/**
10501 * @opcode 0xea
10502 */
/**
 * @opcode 0xea
 *
 * JMP Ap - direct far jump (ptr16:16 / ptr16:32).  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
10518
10519
10520/**
10521 * @opcode 0xeb
10522 */
/**
 * @opcode 0xeb
 *
 * JMP Jb - short relative jump (8-bit signed displacement).
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
10535
10536
10537/** Opcode 0xec */
/** Opcode 0xec.
 * IN AL,DX - read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
10544
10545
10546/** Opcode 0xed */
/** Opcode 0xed.
 * IN eAX,DX - read a word/dword from the I/O port in DX into AX/EAX.
 * NOTE(review): the function name lacks the 'in_' part its siblings have
 * (cf. iemOp_in_AL_DX); it is left as-is since the one-byte dispatch table
 * references it by this name. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10553
10554
10555/** Opcode 0xee */
/** Opcode 0xee.
 * OUT DX,AL - write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
10562
10563
10564/** Opcode 0xef */
/** Opcode 0xef.
 * OUT DX,eAX - write AX/EAX to the I/O port in DX; access width follows the
 * effective operand size. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10571
10572
10573/**
10574 * @opcode 0xf0
10575 */
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records the prefix and continues decoding with the next
 * opcode byte through the one-byte map.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10584
10585
10586/**
10587 * @opcode 0xf1
10588 */
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises \#DB via the common interrupt C implementation.
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
10596
10597
10598/**
10599 * @opcode 0xf2
10600 */
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: records the prefix (clearing any earlier REPE) and
 * continues decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10615
10616
10617/**
10618 * @opcode 0xf3
10619 */
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix: records the prefix (clearing any earlier REPNE) and
 * continues decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10634
10635
10636/**
10637 * @opcode 0xf4
10638 */
/**
 * @opcode 0xf4
 *
 * HLT - deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
10645
10646
10647/**
10648 * @opcode 0xf5
10649 */
/**
 * @opcode 0xf5
 *
 * CMC - complement the carry flag; no other flags are touched.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10660
10661
10662/**
10663 * Common implementation of 'inc/dec/not/neg Eb'.
10664 *
10665 * @param bRm The RM byte.
10666 * @param pImpl The instruction implementation.
10667 */
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Register operands use the normal worker; memory operands are mapped
 * read-write and the LOCK prefix selects the locked worker variant.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK is legal for the memory forms of this group; pick the worker. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10705
10706
10707/**
10708 * Common implementation of 'inc/dec/not/neg Ev'.
10709 *
10710 * @param bRm The RM byte.
10711 * @param pImpl The instruction implementation.
10712 */
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are handled by iemOpCommonUnaryGReg; memory operands are
 * mapped read-write per the effective operand size, and the LOCK prefix
 * selects the locked worker variant.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            /* LOCK selects the locked worker variant. */
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10785
10786
10787/** Opcode 0xf6 /0. */
/** Opcode 0xf6 /0.
 * TEST Eb,Ib - AND without writeback, flags only.  The memory operand is
 * therefore mapped read-only; LOCK is rejected by the decoding helper. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* One immediate byte follows the ModR/M bytes (the '1' to CALC_RM_EFF_ADDR). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10833
10834
10835/** Opcode 0xf7 /0. */
/** Opcode 0xf7 /0.
 * TEST Ev,Iv - AND without writeback, flags only.  Memory operands are
 * mapped read-only; the immediate width follows the effective operand size
 * (sign-extended 32-bit immediate in 64-bit mode). */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 2 immediate bytes follow the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 immediate bytes follow the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 immediate bytes follow the ModR/M bytes (sign-extended to 64-bit). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10974
10975
10976/** Opcode 0xf6 /4, /5, /6 and /7. */
/** Opcode 0xf6 /4, /5, /6 and /7.
 * Common worker for the byte-sized MUL/IMUL/DIV/IDIV forms: AX is the
 * implicit destination, the Eb operand is the multiplier/divisor.  The
 * assembly worker returns non-zero on a divide error, which is turned into
 * a \#DE here. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; anything else raises #DE. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; anything else raises #DE. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11027
11028
11029/** Opcode 0xf7 /4, /5, /6 and /7. */
11030FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
11031{
11032 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11033
11034 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11035 {
11036 /* register access */
11037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11038 switch (pVCpu->iem.s.enmEffOpSize)
11039 {
11040 case IEMMODE_16BIT:
11041 {
11042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11043 IEM_MC_BEGIN(4, 1);
11044 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11045 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11046 IEM_MC_ARG(uint16_t, u16Value, 2);
11047 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11048 IEM_MC_LOCAL(int32_t, rc);
11049
11050 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11051 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11052 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11053 IEM_MC_REF_EFLAGS(pEFlags);
11054 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11055 IEM_MC_IF_LOCAL_IS_Z(rc) {
11056 IEM_MC_ADVANCE_RIP();
11057 } IEM_MC_ELSE() {
11058 IEM_MC_RAISE_DIVIDE_ERROR();
11059 } IEM_MC_ENDIF();
11060
11061 IEM_MC_END();
11062 return VINF_SUCCESS;
11063 }
11064
11065 case IEMMODE_32BIT:
11066 {
11067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11068 IEM_MC_BEGIN(4, 1);
11069 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11070 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11071 IEM_MC_ARG(uint32_t, u32Value, 2);
11072 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11073 IEM_MC_LOCAL(int32_t, rc);
11074
11075 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11076 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11077 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11078 IEM_MC_REF_EFLAGS(pEFlags);
11079 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11080 IEM_MC_IF_LOCAL_IS_Z(rc) {
11081 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11082 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11083 IEM_MC_ADVANCE_RIP();
11084 } IEM_MC_ELSE() {
11085 IEM_MC_RAISE_DIVIDE_ERROR();
11086 } IEM_MC_ENDIF();
11087
11088 IEM_MC_END();
11089 return VINF_SUCCESS;
11090 }
11091
11092 case IEMMODE_64BIT:
11093 {
11094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11095 IEM_MC_BEGIN(4, 1);
11096 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11097 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11098 IEM_MC_ARG(uint64_t, u64Value, 2);
11099 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11100 IEM_MC_LOCAL(int32_t, rc);
11101
11102 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11103 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11104 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11105 IEM_MC_REF_EFLAGS(pEFlags);
11106 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11107 IEM_MC_IF_LOCAL_IS_Z(rc) {
11108 IEM_MC_ADVANCE_RIP();
11109 } IEM_MC_ELSE() {
11110 IEM_MC_RAISE_DIVIDE_ERROR();
11111 } IEM_MC_ENDIF();
11112
11113 IEM_MC_END();
11114 return VINF_SUCCESS;
11115 }
11116
11117 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11118 }
11119 }
11120 else
11121 {
11122 /* memory access. */
11123 switch (pVCpu->iem.s.enmEffOpSize)
11124 {
11125 case IEMMODE_16BIT:
11126 {
11127 IEM_MC_BEGIN(4, 2);
11128 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11129 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11130 IEM_MC_ARG(uint16_t, u16Value, 2);
11131 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11133 IEM_MC_LOCAL(int32_t, rc);
11134
11135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11137 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11138 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11139 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11140 IEM_MC_REF_EFLAGS(pEFlags);
11141 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11142 IEM_MC_IF_LOCAL_IS_Z(rc) {
11143 IEM_MC_ADVANCE_RIP();
11144 } IEM_MC_ELSE() {
11145 IEM_MC_RAISE_DIVIDE_ERROR();
11146 } IEM_MC_ENDIF();
11147
11148 IEM_MC_END();
11149 return VINF_SUCCESS;
11150 }
11151
11152 case IEMMODE_32BIT:
11153 {
11154 IEM_MC_BEGIN(4, 2);
11155 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11156 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11157 IEM_MC_ARG(uint32_t, u32Value, 2);
11158 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11160 IEM_MC_LOCAL(int32_t, rc);
11161
11162 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11164 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11165 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11166 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11167 IEM_MC_REF_EFLAGS(pEFlags);
11168 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11169 IEM_MC_IF_LOCAL_IS_Z(rc) {
11170 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11171 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11172 IEM_MC_ADVANCE_RIP();
11173 } IEM_MC_ELSE() {
11174 IEM_MC_RAISE_DIVIDE_ERROR();
11175 } IEM_MC_ENDIF();
11176
11177 IEM_MC_END();
11178 return VINF_SUCCESS;
11179 }
11180
11181 case IEMMODE_64BIT:
11182 {
11183 IEM_MC_BEGIN(4, 2);
11184 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11185 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11186 IEM_MC_ARG(uint64_t, u64Value, 2);
11187 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11188 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11189 IEM_MC_LOCAL(int32_t, rc);
11190
11191 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11193 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11194 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11195 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11196 IEM_MC_REF_EFLAGS(pEFlags);
11197 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11198 IEM_MC_IF_LOCAL_IS_Z(rc) {
11199 IEM_MC_ADVANCE_RIP();
11200 } IEM_MC_ELSE() {
11201 IEM_MC_RAISE_DIVIDE_ERROR();
11202 } IEM_MC_ENDIF();
11203
11204 IEM_MC_END();
11205 return VINF_SUCCESS;
11206 }
11207
11208 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11209 }
11210 }
11211}
11212
11213/**
11214 * @opcode 0xf6
11215 */
11216FNIEMOP_DEF(iemOp_Grp3_Eb)
11217{
11218 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11219 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11220 {
11221 case 0:
11222 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
11223 case 1:
11224/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11225 return IEMOP_RAISE_INVALID_OPCODE();
11226 case 2:
11227 IEMOP_MNEMONIC(not_Eb, "not Eb");
11228 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
11229 case 3:
11230 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
11231 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
11232 case 4:
11233 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
11234 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11235 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
11236 case 5:
11237 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
11238 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11239 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
11240 case 6:
11241 IEMOP_MNEMONIC(div_Eb, "div Eb");
11242 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11243 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
11244 case 7:
11245 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
11246 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11247 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
11248 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11249 }
11250}
11251
11252
11253/**
11254 * @opcode 0xf7
11255 */
11256FNIEMOP_DEF(iemOp_Grp3_Ev)
11257{
11258 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11259 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11260 {
11261 case 0:
11262 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
11263 case 1:
11264/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11265 return IEMOP_RAISE_INVALID_OPCODE();
11266 case 2:
11267 IEMOP_MNEMONIC(not_Ev, "not Ev");
11268 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
11269 case 3:
11270 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
11271 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
11272 case 4:
11273 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
11274 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11275 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
11276 case 5:
11277 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
11278 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11279 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
11280 case 6:
11281 IEMOP_MNEMONIC(div_Ev, "div Ev");
11282 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11283 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
11284 case 7:
11285 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
11286 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11287 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
11288 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11289 }
11290}
11291
11292
11293/**
11294 * @opcode 0xf8
11295 */
11296FNIEMOP_DEF(iemOp_clc)
11297{
11298 IEMOP_MNEMONIC(clc, "clc");
11299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11300 IEM_MC_BEGIN(0, 0);
11301 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
11302 IEM_MC_ADVANCE_RIP();
11303 IEM_MC_END();
11304 return VINF_SUCCESS;
11305}
11306
11307
11308/**
11309 * @opcode 0xf9
11310 */
11311FNIEMOP_DEF(iemOp_stc)
11312{
11313 IEMOP_MNEMONIC(stc, "stc");
11314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11315 IEM_MC_BEGIN(0, 0);
11316 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
11317 IEM_MC_ADVANCE_RIP();
11318 IEM_MC_END();
11319 return VINF_SUCCESS;
11320}
11321
11322
11323/**
11324 * @opcode 0xfa
11325 */
11326FNIEMOP_DEF(iemOp_cli)
11327{
11328 IEMOP_MNEMONIC(cli, "cli");
11329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11330 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
11331}
11332
11333
/**
 * @opcode 0xfb
 *
 * STI - set the interrupt flag; privilege checks and the interrupt shadow
 * are handled by the deferred C implementation.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11340
11341
11342/**
11343 * @opcode 0xfc
11344 */
11345FNIEMOP_DEF(iemOp_cld)
11346{
11347 IEMOP_MNEMONIC(cld, "cld");
11348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11349 IEM_MC_BEGIN(0, 0);
11350 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
11351 IEM_MC_ADVANCE_RIP();
11352 IEM_MC_END();
11353 return VINF_SUCCESS;
11354}
11355
11356
11357/**
11358 * @opcode 0xfd
11359 */
11360FNIEMOP_DEF(iemOp_std)
11361{
11362 IEMOP_MNEMONIC(std, "std");
11363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11364 IEM_MC_BEGIN(0, 0);
11365 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
11366 IEM_MC_ADVANCE_RIP();
11367 IEM_MC_END();
11368 return VINF_SUCCESS;
11369}
11370
11371
11372/**
11373 * @opcode 0xfe
11374 */
11375FNIEMOP_DEF(iemOp_Grp4)
11376{
11377 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11378 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11379 {
11380 case 0:
11381 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
11382 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
11383 case 1:
11384 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
11385 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
11386 default:
11387 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
11388 return IEMOP_RAISE_INVALID_OPCODE();
11389 }
11390}
11391
11392
11393/**
11394 * Opcode 0xff /2.
11395 * @param bRm The RM byte.
11396 */
11397FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
11398{
11399 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
11400 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11401
11402 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11403 {
11404 /* The new RIP is taken from a register. */
11405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11406 switch (pVCpu->iem.s.enmEffOpSize)
11407 {
11408 case IEMMODE_16BIT:
11409 IEM_MC_BEGIN(1, 0);
11410 IEM_MC_ARG(uint16_t, u16Target, 0);
11411 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11412 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11413 IEM_MC_END()
11414 return VINF_SUCCESS;
11415
11416 case IEMMODE_32BIT:
11417 IEM_MC_BEGIN(1, 0);
11418 IEM_MC_ARG(uint32_t, u32Target, 0);
11419 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11420 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11421 IEM_MC_END()
11422 return VINF_SUCCESS;
11423
11424 case IEMMODE_64BIT:
11425 IEM_MC_BEGIN(1, 0);
11426 IEM_MC_ARG(uint64_t, u64Target, 0);
11427 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11428 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11429 IEM_MC_END()
11430 return VINF_SUCCESS;
11431
11432 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11433 }
11434 }
11435 else
11436 {
11437 /* The new RIP is taken from a register. */
11438 switch (pVCpu->iem.s.enmEffOpSize)
11439 {
11440 case IEMMODE_16BIT:
11441 IEM_MC_BEGIN(1, 1);
11442 IEM_MC_ARG(uint16_t, u16Target, 0);
11443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11444 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11446 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11447 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11448 IEM_MC_END()
11449 return VINF_SUCCESS;
11450
11451 case IEMMODE_32BIT:
11452 IEM_MC_BEGIN(1, 1);
11453 IEM_MC_ARG(uint32_t, u32Target, 0);
11454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11455 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11457 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11458 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11459 IEM_MC_END()
11460 return VINF_SUCCESS;
11461
11462 case IEMMODE_64BIT:
11463 IEM_MC_BEGIN(1, 1);
11464 IEM_MC_ARG(uint64_t, u64Target, 0);
11465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11466 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11468 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11469 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11470 IEM_MC_END()
11471 return VINF_SUCCESS;
11472
11473 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11474 }
11475 }
11476}
11477
11478typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11479
11480FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11481{
11482 /* Registers? How?? */
11483 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
11484 { /* likely */ }
11485 else
11486 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11487
11488 /* Far pointer loaded from memory. */
11489 switch (pVCpu->iem.s.enmEffOpSize)
11490 {
11491 case IEMMODE_16BIT:
11492 IEM_MC_BEGIN(3, 1);
11493 IEM_MC_ARG(uint16_t, u16Sel, 0);
11494 IEM_MC_ARG(uint16_t, offSeg, 1);
11495 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11497 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11499 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11500 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11501 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11502 IEM_MC_END();
11503 return VINF_SUCCESS;
11504
11505 case IEMMODE_64BIT:
11506 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
11507 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11508 * and call far qword [rsp] encodings. */
11509 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11510 {
11511 IEM_MC_BEGIN(3, 1);
11512 IEM_MC_ARG(uint16_t, u16Sel, 0);
11513 IEM_MC_ARG(uint64_t, offSeg, 1);
11514 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11516 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11518 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11519 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11520 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11521 IEM_MC_END();
11522 return VINF_SUCCESS;
11523 }
11524 /* AMD falls thru. */
11525 RT_FALL_THRU();
11526
11527 case IEMMODE_32BIT:
11528 IEM_MC_BEGIN(3, 1);
11529 IEM_MC_ARG(uint16_t, u16Sel, 0);
11530 IEM_MC_ARG(uint32_t, offSeg, 1);
11531 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11533 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11535 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11536 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11537 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11538 IEM_MC_END();
11539 return VINF_SUCCESS;
11540
11541 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11542 }
11543}
11544
11545
11546/**
11547 * Opcode 0xff /3.
11548 * @param bRm The RM byte.
11549 */
11550FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
11551{
11552 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
11553 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
11554}
11555
11556
11557/**
11558 * Opcode 0xff /4.
11559 * @param bRm The RM byte.
11560 */
11561FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
11562{
11563 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
11564 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11565
11566 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11567 {
11568 /* The new RIP is taken from a register. */
11569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11570 switch (pVCpu->iem.s.enmEffOpSize)
11571 {
11572 case IEMMODE_16BIT:
11573 IEM_MC_BEGIN(0, 1);
11574 IEM_MC_LOCAL(uint16_t, u16Target);
11575 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11576 IEM_MC_SET_RIP_U16(u16Target);
11577 IEM_MC_END()
11578 return VINF_SUCCESS;
11579
11580 case IEMMODE_32BIT:
11581 IEM_MC_BEGIN(0, 1);
11582 IEM_MC_LOCAL(uint32_t, u32Target);
11583 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11584 IEM_MC_SET_RIP_U32(u32Target);
11585 IEM_MC_END()
11586 return VINF_SUCCESS;
11587
11588 case IEMMODE_64BIT:
11589 IEM_MC_BEGIN(0, 1);
11590 IEM_MC_LOCAL(uint64_t, u64Target);
11591 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11592 IEM_MC_SET_RIP_U64(u64Target);
11593 IEM_MC_END()
11594 return VINF_SUCCESS;
11595
11596 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11597 }
11598 }
11599 else
11600 {
11601 /* The new RIP is taken from a memory location. */
11602 switch (pVCpu->iem.s.enmEffOpSize)
11603 {
11604 case IEMMODE_16BIT:
11605 IEM_MC_BEGIN(0, 2);
11606 IEM_MC_LOCAL(uint16_t, u16Target);
11607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11610 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11611 IEM_MC_SET_RIP_U16(u16Target);
11612 IEM_MC_END()
11613 return VINF_SUCCESS;
11614
11615 case IEMMODE_32BIT:
11616 IEM_MC_BEGIN(0, 2);
11617 IEM_MC_LOCAL(uint32_t, u32Target);
11618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11619 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11621 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11622 IEM_MC_SET_RIP_U32(u32Target);
11623 IEM_MC_END()
11624 return VINF_SUCCESS;
11625
11626 case IEMMODE_64BIT:
11627 IEM_MC_BEGIN(0, 2);
11628 IEM_MC_LOCAL(uint64_t, u64Target);
11629 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11632 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11633 IEM_MC_SET_RIP_U64(u64Target);
11634 IEM_MC_END()
11635 return VINF_SUCCESS;
11636
11637 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11638 }
11639 }
11640}
11641
11642
11643/**
11644 * Opcode 0xff /5.
11645 * @param bRm The RM byte.
11646 */
11647FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
11648{
11649 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
11650 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
11651}
11652
11653
11654/**
11655 * Opcode 0xff /6.
11656 * @param bRm The RM byte.
11657 */
11658FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
11659{
11660 IEMOP_MNEMONIC(push_Ev, "push Ev");
11661
11662 /* Registers are handled by a common worker. */
11663 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11664 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11665
11666 /* Memory we do here. */
11667 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11668 switch (pVCpu->iem.s.enmEffOpSize)
11669 {
11670 case IEMMODE_16BIT:
11671 IEM_MC_BEGIN(0, 2);
11672 IEM_MC_LOCAL(uint16_t, u16Src);
11673 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11676 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11677 IEM_MC_PUSH_U16(u16Src);
11678 IEM_MC_ADVANCE_RIP();
11679 IEM_MC_END();
11680 return VINF_SUCCESS;
11681
11682 case IEMMODE_32BIT:
11683 IEM_MC_BEGIN(0, 2);
11684 IEM_MC_LOCAL(uint32_t, u32Src);
11685 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11688 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11689 IEM_MC_PUSH_U32(u32Src);
11690 IEM_MC_ADVANCE_RIP();
11691 IEM_MC_END();
11692 return VINF_SUCCESS;
11693
11694 case IEMMODE_64BIT:
11695 IEM_MC_BEGIN(0, 2);
11696 IEM_MC_LOCAL(uint64_t, u64Src);
11697 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11698 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11700 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11701 IEM_MC_PUSH_U64(u64Src);
11702 IEM_MC_ADVANCE_RIP();
11703 IEM_MC_END();
11704 return VINF_SUCCESS;
11705
11706 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11707 }
11708}
11709
11710
11711/**
11712 * @opcode 0xff
11713 */
11714FNIEMOP_DEF(iemOp_Grp5)
11715{
11716 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11717 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11718 {
11719 case 0:
11720 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
11721 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
11722 case 1:
11723 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
11724 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
11725 case 2:
11726 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
11727 case 3:
11728 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
11729 case 4:
11730 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
11731 case 5:
11732 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
11733 case 6:
11734 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
11735 case 7:
11736 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
11737 return IEMOP_RAISE_INVALID_OPCODE();
11738 }
11739 AssertFailedReturn(VERR_IEM_IPE_3);
11740}
11741
11742
11743
/**
 * The one-byte opcode dispatch table, indexed by the opcode byte.
 * Forward declared at the top of this file.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
11811
11812
11813/** @} */
11814
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette