VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 66419

Last change on this file since 66419 was 66419, checked in by vboxsync, 8 years ago

IEM: More vstmxcsr work.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 389.8 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 66419 2017-04-04 15:49:07Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24/* Instruction group definitions: */
25
26/** @defgroup og_gen General
27 * @{ */
28 /** @defgroup og_gen_arith Arithmetic
29 * @{ */
30 /** @defgroup og_gen_arith_bin Binary numbers */
31 /** @defgroup og_gen_arith_dec Decimal numbers */
32 /** @} */
33/** @} */
34
35/** @defgroup og_stack Stack
36 * @{ */
37 /** @defgroup og_stack_sreg Segment registers */
38/** @} */
39
40/** @defgroup og_prefix Prefixes */
41/** @defgroup og_escapes Escape bytes */
42
43
44
45/** @name One byte opcodes.
46 * @{
47 */
48
49/* Instruction specification format - work in progress: */
50
51/**
52 * @opcode 0x00
53 * @opmnemonic add
54 * @op1 rm:Eb
55 * @op2 reg:Gb
56 * @opmaps one
57 * @openc ModR/M
58 * @opflmodify cf,pf,af,zf,sf,of
59 * @ophints harmless ignores_op_size
60 * @opstats add_Eb_Gb
61 * @opgroup og_gen_arith_bin
62 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
63 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
64 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
65 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
66 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD r/m8, r8 - record mnemonic/operand info, then defer to the common
       byte-sized binary-operator decoder with the ADD worker function table. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
72
73
74/**
75 * @opcode 0x01
76 * @opgroup og_gen_arith_bin
77 * @opflmodify cf,pf,af,zf,sf,of
78 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
79 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
80 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
81 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
82 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD r/m16/32/64, r16/32/64 - defer to the common word/dword/qword
       binary-operator decoder with the ADD worker function table. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
88
89
90/**
91 * @opcode 0x02
92 * @opgroup og_gen_arith_bin
93 * @opflmodify cf,pf,af,zf,sf,of
94 * @opcopytests iemOp_add_Eb_Gb
95 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD r8, r/m8 - register-destination form of the byte ADD. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
101
102
103/**
104 * @opcode 0x03
105 * @opgroup og_gen_arith_bin
106 * @opflmodify cf,pf,af,zf,sf,of
107 * @opcopytests iemOp_add_Ev_Gv
108 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD r16/32/64, r/m16/32/64 - register-destination form of ADD. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
114
115
116/**
117 * @opcode 0x04
118 * @opgroup og_gen_arith_bin
119 * @opflmodify cf,pf,af,zf,sf,of
120 * @opcopytests iemOp_add_Eb_Gb
121 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL, imm8 - fixed 8-bit form, so the operand-size prefix is irrelevant. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
127
128
129/**
130 * @opcode 0x05
131 * @opgroup og_gen_arith_bin
132 * @opflmodify cf,pf,af,zf,sf,of
133 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
134 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
135 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
136 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
137 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX, imm16/32 - operand size selects AX/EAX/RAX in the common helper. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
143
144
145/**
146 * @opcode 0x06
147 * @opgroup og_stack_sreg
148 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES - segment register push; encoding not valid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
155
156
157/**
158 * @opcode 0x07
159 * @opgroup og_stack_sreg
160 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES - segment register pop; encoding not valid in 64-bit mode.
       Deferred to the C implementation worker for the segment load. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
168
169
170/**
171 * @opcode 0x08
172 * @opgroup og_gen_arith_bin
173 * @opflmodify cf,pf,af,zf,sf,of
174 * @opflundef af
175 * @opflclear of,cf
176 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
177 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
178 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
179 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
180 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR r/m8, r8 - AF is left undefined by OR, so exclude it from verification. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
187
188
/**
190 * @opcode 0x09
191 * @opgroup og_gen_arith_bin
192 * @opflmodify cf,pf,af,zf,sf,of
193 * @opflundef af
194 * @opflclear of,cf
195 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
196 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
197 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
198 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
199 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
200 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
201 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR r/m16/32/64, r16/32/64 - AF is undefined after OR, skip verifying it. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}
208
209
210/**
211 * @opcode 0x0a
212 * @opgroup og_gen_arith_bin
213 * @opflmodify cf,pf,af,zf,sf,of
214 * @opflundef af
215 * @opflclear of,cf
216 * @opcopytests iemOp_or_Eb_Gb
217 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR r8, r/m8 - register-destination form; AF undefined after OR. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}
224
225
226/**
227 * @opcode 0x0b
228 * @opgroup og_gen_arith_bin
229 * @opflmodify cf,pf,af,zf,sf,of
230 * @opflundef af
231 * @opflclear of,cf
232 * @opcopytests iemOp_or_Ev_Gv
233 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR r16/32/64, r/m16/32/64 - register-destination form; AF undefined. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
240
241
242/**
243 * @opcode 0x0c
244 * @opgroup og_gen_arith_bin
245 * @opflmodify cf,pf,af,zf,sf,of
246 * @opflundef af
247 * @opflclear of,cf
248 * @opcopytests iemOp_or_Eb_Gb
249 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL, imm8 - fixed 8-bit form; AF undefined after OR. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
256
257
258/**
259 * @opcode 0x0d
260 * @opgroup og_gen_arith_bin
261 * @opflmodify cf,pf,af,zf,sf,of
262 * @opflundef af
263 * @opflclear of,cf
264 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
265 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
266 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
267 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
268 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
269 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
270 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
271 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX, imm16/32 - operand size selects AX/EAX/RAX; AF undefined. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
278
279
280/**
281 * @opcode 0x0e
282 * @opgroup og_stack_sreg
283 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - only decodable outside 64-bit mode (0x0e is REX territory there
       is wrong; it is simply invalid - see DISOPTYPE_INVALID_64). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
290
291
292/**
293 * @opcode 0x0f
294 * @opmnemonic EscTwo0f
295 * @openc two0f
296 * @opdisenum OP_2B_ESC
297 * @ophints harmless
298 * @opgroup og_escapes
299 */
300FNIEMOP_DEF(iemOp_2byteEscape)
301{
302#ifdef VBOX_STRICT
303 /* Sanity check the table the first time around. */
304 static bool s_fTested = false;
305 if (RT_LIKELY(s_fTested)) { /* likely */ }
306 else
307 {
308 s_fTested = true;
309 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
310 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
311 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
312 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
313 }
314#endif
315
316 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
317 {
318 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
319 IEMOP_HLP_MIN_286();
320 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
321 }
322 /* @opdone */
323
324 /*
325 * On the 8086 this is a POP CS instruction.
326 * For the time being we don't specify this this.
327 */
328 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
329 IEMOP_HLP_NO_64BIT();
330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
331 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
332}
333
334/**
335 * @opcode 0x10
336 * @opgroup og_gen_arith_bin
337 * @opfltest cf
338 * @opflmodify cf,pf,af,zf,sf,of
339 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
340 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
341 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
342 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
343 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
344 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC r/m8, r8 - add with carry-in; same decoder shape as ADD. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
350
351
352/**
353 * @opcode 0x11
354 * @opgroup og_gen_arith_bin
355 * @opfltest cf
356 * @opflmodify cf,pf,af,zf,sf,of
357 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
358 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
359 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
360 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
361 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
362 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC r/m16/32/64, r16/32/64 - add with carry-in. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
368
369
370/**
371 * @opcode 0x12
372 * @opgroup og_gen_arith_bin
373 * @opfltest cf
374 * @opflmodify cf,pf,af,zf,sf,of
375 * @opcopytests iemOp_adc_Eb_Gb
376 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC r8, r/m8 - register-destination form. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
382
383
384/**
385 * @opcode 0x13
386 * @opgroup og_gen_arith_bin
387 * @opfltest cf
388 * @opflmodify cf,pf,af,zf,sf,of
389 * @opcopytests iemOp_adc_Ev_Gv
390 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC r16/32/64, r/m16/32/64 - register-destination form. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
396
397
398/**
399 * @opcode 0x14
400 * @opgroup og_gen_arith_bin
401 * @opfltest cf
402 * @opflmodify cf,pf,af,zf,sf,of
403 * @opcopytests iemOp_adc_Eb_Gb
404 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL, imm8 - fixed 8-bit form. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
410
411
412/**
413 * @opcode 0x15
414 * @opgroup og_gen_arith_bin
415 * @opfltest cf
416 * @opflmodify cf,pf,af,zf,sf,of
417 * @opcopytests iemOp_adc_Ev_Gv
418 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX, imm16/32 - operand size selects AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
424
425
426/**
427 * @opcode 0x16
428 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - segment register push; not valid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
435
436
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS - inhibits interrupts for one instruction (DISOPTYPE_INHIBIT_IRQS);
       not valid in 64-bit mode; deferred to the C implementation worker. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
450
451
452/**
453 * @opcode 0x18
454 * @opgroup og_gen_arith_bin
455 * @opfltest cf
456 * @opflmodify cf,pf,af,zf,sf,of
457 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB r/m8, r8 - subtract with borrow-in (CF). */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
463
464
465/**
466 * @opcode 0x19
467 * @opgroup og_gen_arith_bin
468 * @opfltest cf
469 * @opflmodify cf,pf,af,zf,sf,of
470 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB r/m16/32/64, r16/32/64 - subtract with borrow-in (CF). */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
476
477
478/**
479 * @opcode 0x1a
480 * @opgroup og_gen_arith_bin
481 * @opfltest cf
482 * @opflmodify cf,pf,af,zf,sf,of
483 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB r8, r/m8 - register-destination form. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
489
490
491/**
492 * @opcode 0x1b
493 * @opgroup og_gen_arith_bin
494 * @opfltest cf
495 * @opflmodify cf,pf,af,zf,sf,of
496 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB r16/32/64, r/m16/32/64 - register-destination form. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
502
503
504/**
505 * @opcode 0x1c
506 * @opgroup og_gen_arith_bin
507 * @opfltest cf
508 * @opflmodify cf,pf,af,zf,sf,of
509 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL, imm8 - fixed 8-bit form. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
515
516
517/**
518 * @opcode 0x1d
519 * @opgroup og_gen_arith_bin
520 * @opfltest cf
521 * @opflmodify cf,pf,af,zf,sf,of
522 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX, imm16/32 - operand size selects AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
528
529
530/**
531 * @opcode 0x1e
532 * @opgroup og_stack_sreg
533 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - segment register push; not valid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
540
541
542/**
543 * @opcode 0x1f
544 * @opgroup og_stack_sreg
545 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - not valid in 64-bit mode; deferred to the C implementation worker. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
553
554
555/**
556 * @opcode 0x20
557 * @opgroup og_gen_arith_bin
558 * @opflmodify cf,pf,af,zf,sf,of
559 * @opflundef af
560 * @opflclear of,cf
561 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8, r8 - AF is left undefined by AND, so skip verifying it. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
568
569
570/**
571 * @opcode 0x21
572 * @opgroup og_gen_arith_bin
573 * @opflmodify cf,pf,af,zf,sf,of
574 * @opflundef af
575 * @opflclear of,cf
576 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m16/32/64, r16/32/64 - AF undefined after AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
583
584
585/**
586 * @opcode 0x22
587 * @opgroup og_gen_arith_bin
588 * @opflmodify cf,pf,af,zf,sf,of
589 * @opflundef af
590 * @opflclear of,cf
591 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8 - register-destination form; AF undefined. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
598
599
600/**
601 * @opcode 0x23
602 * @opgroup og_gen_arith_bin
603 * @opflmodify cf,pf,af,zf,sf,of
604 * @opflundef af
605 * @opflclear of,cf
606 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND r16/32/64, r/m16/32/64 - register-destination form; AF undefined. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
613
614
615/**
616 * @opcode 0x24
617 * @opgroup og_gen_arith_bin
618 * @opflmodify cf,pf,af,zf,sf,of
619 * @opflundef af
620 * @opflclear of,cf
621 */
622FNIEMOP_DEF(iemOp_and_Al_Ib)
623{
624 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
625 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
626 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
627}
628
629
630/**
631 * @opcode 0x25
632 * @opgroup og_gen_arith_bin
633 * @opflmodify cf,pf,af,zf,sf,of
634 * @opflundef af
635 * @opflclear of,cf
636 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, imm16/32 - operand size selects AX/EAX/RAX; AF undefined. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
643
644
645/**
646 * @opcode 0x26
647 * @opmnemonic SEG
648 * @op1 ES
649 * @opgroup og_prefix
650 * @openc prefix
651 * @opdisenum OP_SEG
652 * @ophints harmless
653 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record the prefix and effective segment,
       then fetch the next opcode byte and continue decoding via the
       one-byte opcode map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
663
664
665/**
666 * @opcode 0x27
667 * @opfltest af,cf
668 * @opflmodify cf,pf,af,zf,sf,of
669 * @opflundef of
670 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; OF is undefined, deferred to
       the C implementation worker. Not valid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
679
680
681/**
682 * @opcode 0x28
683 * @opgroup og_gen_arith_bin
684 * @opflmodify cf,pf,af,zf,sf,of
685 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8 - defer to the common byte-sized binary-op decoder. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
691
692
693/**
694 * @opcode 0x29
695 * @opgroup og_gen_arith_bin
696 * @opflmodify cf,pf,af,zf,sf,of
697 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
703
704
705/**
706 * @opcode 0x2a
707 * @opgroup og_gen_arith_bin
708 * @opflmodify cf,pf,af,zf,sf,of
709 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8 - register-destination form. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
715
716
717/**
718 * @opcode 0x2b
719 * @opgroup og_gen_arith_bin
720 * @opflmodify cf,pf,af,zf,sf,of
721 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB r16/32/64, r/m16/32/64 - register-destination form. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
727
728
729/**
730 * @opcode 0x2c
731 * @opgroup og_gen_arith_bin
732 * @opflmodify cf,pf,af,zf,sf,of
733 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8 - fixed 8-bit form. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
739
740
741/**
742 * @opcode 0x2d
743 * @opgroup og_gen_arith_bin
744 * @opflmodify cf,pf,af,zf,sf,of
745 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, imm16/32 - operand size selects AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
751
752
753/**
754 * @opcode 0x2e
755 * @opmnemonic SEG
756 * @op1 CS
757 * @opgroup og_prefix
758 * @openc prefix
759 * @opdisenum OP_SEG
760 * @ophints harmless
761 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record prefix + effective segment, then
       continue decoding the next opcode byte via the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
771
772
773/**
774 * @opcode 0x2f
775 * @opfltest af,cf
776 * @opflmodify cf,pf,af,zf,sf,of
777 * @opflundef of
778 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; OF undefined, deferred to
       the C implementation worker. Not valid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
787
788
789/**
790 * @opcode 0x30
791 * @opgroup og_gen_arith_bin
792 * @opflmodify cf,pf,af,zf,sf,of
793 * @opflundef af
794 * @opflclear of,cf
795 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8 - AF is left undefined by XOR, so skip verifying it. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
802
803
804/**
805 * @opcode 0x31
806 * @opgroup og_gen_arith_bin
807 * @opflmodify cf,pf,af,zf,sf,of
808 * @opflundef af
809 * @opflclear of,cf
810 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m16/32/64, r16/32/64 - AF undefined after XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
817
818
819/**
820 * @opcode 0x32
821 * @opgroup og_gen_arith_bin
822 * @opflmodify cf,pf,af,zf,sf,of
823 * @opflundef af
824 * @opflclear of,cf
825 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8 - register-destination form; AF undefined. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
832
833
834/**
835 * @opcode 0x33
836 * @opgroup og_gen_arith_bin
837 * @opflmodify cf,pf,af,zf,sf,of
838 * @opflundef af
839 * @opflclear of,cf
840 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR r16/32/64, r/m16/32/64 - register-destination form; AF undefined. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
847
848
849/**
850 * @opcode 0x34
851 * @opgroup og_gen_arith_bin
852 * @opflmodify cf,pf,af,zf,sf,of
853 * @opflundef af
854 * @opflclear of,cf
855 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8 - fixed 8-bit form; AF undefined. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
862
863
864/**
865 * @opcode 0x35
866 * @opgroup og_gen_arith_bin
867 * @opflmodify cf,pf,af,zf,sf,of
868 * @opflundef af
869 * @opflclear of,cf
870 */
871FNIEMOP_DEF(iemOp_xor_eAX_Iz)
872{
873 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
874 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
875 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
876}
877
878
879/**
880 * @opcode 0x36
881 * @opmnemonic SEG
882 * @op1 SS
883 * @opgroup og_prefix
884 * @openc prefix
885 * @opdisenum OP_SEG
886 * @ophints harmless
887 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record prefix + effective segment, then
       continue decoding the next opcode byte via the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
897
898
899/**
900 * @opcode 0x37
901 * @opfltest af,cf
902 * @opflmodify cf,pf,af,zf,sf,of
903 * @opflundef pf,zf,sf,of
904 * @opgroup og_gen_arith_dec
905 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
906 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
907 * @optest efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
908 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
909 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
910 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
911 * @optest efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
912 * @optest efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
913 * @optest efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
914 * @optest efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
915 * @optest efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
916 * @optest efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
917 * @optest efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
918 * @optest efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
919 * @optest efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
920 * @optest efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
921 * @optest efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
922 * @optest efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
923 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA - ASCII adjust AL after addition; OF excluded from verification
       (undefined); deferred to the C implementation worker. Not valid in
       64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
}
933
934
935/**
936 * @opcode 0x38
937 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8.
       NOTE(review): still uses the old IEMOP_MNEMONIC macro; sibling opcodes
       have been converted to IEMOP_MNEMONIC2 with doxygen @optest annotations. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
943
944
945/**
946 * @opcode 0x39
947 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m16/32/64, r16/32/64 (old-style mnemonic macro, not yet converted). */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
953
954
955/**
956 * @opcode 0x3a
957 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8 - register-first form (old-style mnemonic macro). */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
963
964
965/**
966 * @opcode 0x3b
967 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r16/32/64, r/m16/32/64 - register-first form (old-style mnemonic macro). */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
973
974
975/**
976 * @opcode 0x3c
977 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8 (old-style mnemonic macro). */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
983
984
985/**
986 * @opcode 0x3d
987 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, imm16/32 (old-style mnemonic macro). */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
993
994
995/**
996 * @opcode 0x3e
997 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record prefix + effective segment, then
       continue decoding the next opcode byte via the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1007
1008
1009/**
1010 * @opcode 0x3f
1011 * @opfltest af,cf
1012 * @opflmodify cf,pf,af,zf,sf,of
1013 * @opflundef pf,zf,sf,of
1014 * @opgroup og_gen_arith_dec
1015 * @optest efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1016 * @optest efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1017 * @optest efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1018 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1019 * @optest efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1020 * @optest efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1021 * @optest efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1022 * @optest efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1023 * @optest efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1024 * @optest efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1025 * @optest efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1026 * @optest efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1027 * @optest efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1028 * @optest efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1029 * @optest efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1030 * @optest efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1031 * @optest efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1032 * @optest efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1033 * @optest efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1034 */
1035FNIEMOP_DEF(iemOp_aas)
1036{
1037 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
1038 IEMOP_HLP_NO_64BIT();
1039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1040 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1041
1042 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
1043}
1044
1045
1046/**
1047 * Common 'inc/dec/not/neg register' helper.
1048 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    /* Apply the unary worker (inc/dec/not/neg, selected by pImpl) to general
       register iReg, dispatching on the effective operand size.  Each case
       builds a microcode block: reference the register and EFLAGS, call the
       size-specific assembly worker, then advance RIP. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit ops zero the high half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* NOTE(review): falls through here only for an impossible enmEffOpSize
       value; consider an assertion instead of silently returning success. */
    return VINF_SUCCESS;
}
1090
1091
1092/**
1093 * @opcode 0x40
1094 */
1095FNIEMOP_DEF(iemOp_inc_eAX)
1096{
1097 /*
1098 * This is a REX prefix in 64-bit mode.
1099 */
1100 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1101 {
1102 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
1103 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
1104
1105 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1106 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1107 }
1108
1109 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
1110 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
1111}
1112
1113
1114/**
1115 * @opcode 0x41
1116 */
1117FNIEMOP_DEF(iemOp_inc_eCX)
1118{
1119 /*
1120 * This is a REX prefix in 64-bit mode.
1121 */
1122 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1123 {
1124 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
1125 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
1126 pVCpu->iem.s.uRexB = 1 << 3;
1127
1128 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1129 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1130 }
1131
1132 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
1133 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
1134}
1135
1136
1137/**
1138 * @opcode 0x42
1139 */
1140FNIEMOP_DEF(iemOp_inc_eDX)
1141{
1142 /*
1143 * This is a REX prefix in 64-bit mode.
1144 */
1145 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1146 {
1147 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
1148 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
1149 pVCpu->iem.s.uRexIndex = 1 << 3;
1150
1151 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1152 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1153 }
1154
1155 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
1156 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
1157}
1158
1159
1160
1161/**
1162 * @opcode 0x43
1163 */
1164FNIEMOP_DEF(iemOp_inc_eBX)
1165{
1166 /*
1167 * This is a REX prefix in 64-bit mode.
1168 */
1169 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1170 {
1171 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
1172 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1173 pVCpu->iem.s.uRexB = 1 << 3;
1174 pVCpu->iem.s.uRexIndex = 1 << 3;
1175
1176 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1177 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1178 }
1179
1180 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
1181 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
1182}
1183
1184
1185/**
1186 * @opcode 0x44
1187 */
1188FNIEMOP_DEF(iemOp_inc_eSP)
1189{
1190 /*
1191 * This is a REX prefix in 64-bit mode.
1192 */
1193 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1194 {
1195 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
1196 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
1197 pVCpu->iem.s.uRexReg = 1 << 3;
1198
1199 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1200 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1201 }
1202
1203 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
1204 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
1205}
1206
1207
1208/**
1209 * @opcode 0x45
1210 */
1211FNIEMOP_DEF(iemOp_inc_eBP)
1212{
1213 /*
1214 * This is a REX prefix in 64-bit mode.
1215 */
1216 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1217 {
1218 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
1219 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
1220 pVCpu->iem.s.uRexReg = 1 << 3;
1221 pVCpu->iem.s.uRexB = 1 << 3;
1222
1223 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1224 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1225 }
1226
1227 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
1228 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
1229}
1230
1231
1232/**
1233 * @opcode 0x46
1234 */
1235FNIEMOP_DEF(iemOp_inc_eSI)
1236{
1237 /*
1238 * This is a REX prefix in 64-bit mode.
1239 */
1240 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1241 {
1242 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
1243 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
1244 pVCpu->iem.s.uRexReg = 1 << 3;
1245 pVCpu->iem.s.uRexIndex = 1 << 3;
1246
1247 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1248 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1249 }
1250
1251 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
1252 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
1253}
1254
1255
1256/**
1257 * @opcode 0x47
1258 */
1259FNIEMOP_DEF(iemOp_inc_eDI)
1260{
1261 /*
1262 * This is a REX prefix in 64-bit mode.
1263 */
1264 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1265 {
1266 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
1267 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1268 pVCpu->iem.s.uRexReg = 1 << 3;
1269 pVCpu->iem.s.uRexB = 1 << 3;
1270 pVCpu->iem.s.uRexIndex = 1 << 3;
1271
1272 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1273 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1274 }
1275
1276 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
1277 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
1278}
1279
1280
1281/**
1282 * @opcode 0x48
1283 */
1284FNIEMOP_DEF(iemOp_dec_eAX)
1285{
1286 /*
1287 * This is a REX prefix in 64-bit mode.
1288 */
1289 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1290 {
1291 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1292 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
1293 iemRecalEffOpSize(pVCpu);
1294
1295 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1296 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1297 }
1298
1299 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
1300 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
1301}
1302
1303
1304/**
1305 * @opcode 0x49
1306 */
1307FNIEMOP_DEF(iemOp_dec_eCX)
1308{
1309 /*
1310 * This is a REX prefix in 64-bit mode.
1311 */
1312 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1313 {
1314 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
1315 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1316 pVCpu->iem.s.uRexB = 1 << 3;
1317 iemRecalEffOpSize(pVCpu);
1318
1319 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1320 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1321 }
1322
1323 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
1324 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
1325}
1326
1327
1328/**
1329 * @opcode 0x4a
1330 */
1331FNIEMOP_DEF(iemOp_dec_eDX)
1332{
1333 /*
1334 * This is a REX prefix in 64-bit mode.
1335 */
1336 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1337 {
1338 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
1339 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1340 pVCpu->iem.s.uRexIndex = 1 << 3;
1341 iemRecalEffOpSize(pVCpu);
1342
1343 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1344 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1345 }
1346
1347 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
1348 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
1349}
1350
1351
1352/**
1353 * @opcode 0x4b
1354 */
1355FNIEMOP_DEF(iemOp_dec_eBX)
1356{
1357 /*
1358 * This is a REX prefix in 64-bit mode.
1359 */
1360 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1361 {
1362 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
1363 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1364 pVCpu->iem.s.uRexB = 1 << 3;
1365 pVCpu->iem.s.uRexIndex = 1 << 3;
1366 iemRecalEffOpSize(pVCpu);
1367
1368 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1369 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1370 }
1371
1372 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
1373 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
1374}
1375
1376
1377/**
1378 * @opcode 0x4c
1379 */
1380FNIEMOP_DEF(iemOp_dec_eSP)
1381{
1382 /*
1383 * This is a REX prefix in 64-bit mode.
1384 */
1385 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1386 {
1387 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
1388 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
1389 pVCpu->iem.s.uRexReg = 1 << 3;
1390 iemRecalEffOpSize(pVCpu);
1391
1392 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1393 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1394 }
1395
1396 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
1397 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
1398}
1399
1400
1401/**
1402 * @opcode 0x4d
1403 */
1404FNIEMOP_DEF(iemOp_dec_eBP)
1405{
1406 /*
1407 * This is a REX prefix in 64-bit mode.
1408 */
1409 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1410 {
1411 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
1412 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1413 pVCpu->iem.s.uRexReg = 1 << 3;
1414 pVCpu->iem.s.uRexB = 1 << 3;
1415 iemRecalEffOpSize(pVCpu);
1416
1417 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1418 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1419 }
1420
1421 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
1422 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
1423}
1424
1425
1426/**
1427 * @opcode 0x4e
1428 */
1429FNIEMOP_DEF(iemOp_dec_eSI)
1430{
1431 /*
1432 * This is a REX prefix in 64-bit mode.
1433 */
1434 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1435 {
1436 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
1437 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1438 pVCpu->iem.s.uRexReg = 1 << 3;
1439 pVCpu->iem.s.uRexIndex = 1 << 3;
1440 iemRecalEffOpSize(pVCpu);
1441
1442 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1443 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1444 }
1445
1446 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
1447 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
1448}
1449
1450
1451/**
1452 * @opcode 0x4f
1453 */
1454FNIEMOP_DEF(iemOp_dec_eDI)
1455{
1456 /*
1457 * This is a REX prefix in 64-bit mode.
1458 */
1459 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1460 {
1461 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
1462 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1463 pVCpu->iem.s.uRexReg = 1 << 3;
1464 pVCpu->iem.s.uRexB = 1 << 3;
1465 pVCpu->iem.s.uRexIndex = 1 << 3;
1466 iemRecalEffOpSize(pVCpu);
1467
1468 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1469 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1470 }
1471
1472 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
1473 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
1474}
1475
1476
1477/**
1478 * Common 'push register' helper.
1479 */
1480FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
1481{
1482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1483 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1484 {
1485 iReg |= pVCpu->iem.s.uRexB;
1486 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1487 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1488 }
1489
1490 switch (pVCpu->iem.s.enmEffOpSize)
1491 {
1492 case IEMMODE_16BIT:
1493 IEM_MC_BEGIN(0, 1);
1494 IEM_MC_LOCAL(uint16_t, u16Value);
1495 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
1496 IEM_MC_PUSH_U16(u16Value);
1497 IEM_MC_ADVANCE_RIP();
1498 IEM_MC_END();
1499 break;
1500
1501 case IEMMODE_32BIT:
1502 IEM_MC_BEGIN(0, 1);
1503 IEM_MC_LOCAL(uint32_t, u32Value);
1504 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
1505 IEM_MC_PUSH_U32(u32Value);
1506 IEM_MC_ADVANCE_RIP();
1507 IEM_MC_END();
1508 break;
1509
1510 case IEMMODE_64BIT:
1511 IEM_MC_BEGIN(0, 1);
1512 IEM_MC_LOCAL(uint64_t, u64Value);
1513 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
1514 IEM_MC_PUSH_U64(u64Value);
1515 IEM_MC_ADVANCE_RIP();
1516 IEM_MC_END();
1517 break;
1518 }
1519
1520 return VINF_SUCCESS;
1521}
1522
1523
/**
 * @opcode 0x50
 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    /* Operand size and REX.B folding are handled by the common worker. */
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
1532
1533
/**
 * @opcode 0x51
 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    /* Operand size and REX.B folding are handled by the common worker. */
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
1542
1543
/**
 * @opcode 0x52
 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    /* Operand size and REX.B folding are handled by the common worker. */
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
1552
1553
/**
 * @opcode 0x53
 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    /* Operand size and REX.B folding are handled by the common worker. */
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1562
1563
1564/**
1565 * @opcode 0x54
1566 */
1567FNIEMOP_DEF(iemOp_push_eSP)
1568{
1569 IEMOP_MNEMONIC(push_rSP, "push rSP");
1570 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
1571 {
1572 IEM_MC_BEGIN(0, 1);
1573 IEM_MC_LOCAL(uint16_t, u16Value);
1574 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
1575 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
1576 IEM_MC_PUSH_U16(u16Value);
1577 IEM_MC_ADVANCE_RIP();
1578 IEM_MC_END();
1579 }
1580 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
1581}
1582
1583
/**
 * @opcode 0x55
 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    /* Operand size and REX.B folding are handled by the common worker. */
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
1592
1593
/**
 * @opcode 0x56
 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    /* Operand size and REX.B folding are handled by the common worker. */
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
1602
1603
/**
 * @opcode 0x57
 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    /* Operand size and REX.B folding are handled by the common worker. */
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1612
1613
1614/**
1615 * Common 'pop register' helper.
1616 */
1617FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
1618{
1619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1620 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1621 {
1622 iReg |= pVCpu->iem.s.uRexB;
1623 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1624 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1625 }
1626
1627 switch (pVCpu->iem.s.enmEffOpSize)
1628 {
1629 case IEMMODE_16BIT:
1630 IEM_MC_BEGIN(0, 1);
1631 IEM_MC_LOCAL(uint16_t *, pu16Dst);
1632 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
1633 IEM_MC_POP_U16(pu16Dst);
1634 IEM_MC_ADVANCE_RIP();
1635 IEM_MC_END();
1636 break;
1637
1638 case IEMMODE_32BIT:
1639 IEM_MC_BEGIN(0, 1);
1640 IEM_MC_LOCAL(uint32_t *, pu32Dst);
1641 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
1642 IEM_MC_POP_U32(pu32Dst);
1643 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
1644 IEM_MC_ADVANCE_RIP();
1645 IEM_MC_END();
1646 break;
1647
1648 case IEMMODE_64BIT:
1649 IEM_MC_BEGIN(0, 1);
1650 IEM_MC_LOCAL(uint64_t *, pu64Dst);
1651 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
1652 IEM_MC_POP_U64(pu64Dst);
1653 IEM_MC_ADVANCE_RIP();
1654 IEM_MC_END();
1655 break;
1656 }
1657
1658 return VINF_SUCCESS;
1659}
1660
1661
/**
 * @opcode 0x58
 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    /* Operand size and REX.B folding are handled by the common worker. */
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
1670
1671
/**
 * @opcode 0x59
 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    /* Operand size and REX.B folding are handled by the common worker. */
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
1680
1681
/**
 * @opcode 0x5a
 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    /* Operand size and REX.B folding are handled by the common worker. */
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
1690
1691
/**
 * @opcode 0x5b
 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    /* Operand size and REX.B folding are handled by the common worker. */
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
1700
1701
/**
 * @opcode 0x5c
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this is 'pop r12' and the regular worker handles it.
           Otherwise set up the 64-bit default operand size here since we
           bypass the common worker below. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    /* POP SP is special cased: the value is popped into a local first and only
       then stored to xSP, so the store overrides the stack pointer update. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1751
1752
/**
 * @opcode 0x5d
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    /* Operand size and REX.B folding are handled by the common worker. */
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1761
1762
/**
 * @opcode 0x5e
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    /* Operand size and REX.B folding are handled by the common worker. */
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1771
1772
/**
 * @opcode 0x5f
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    /* Operand size and REX.B folding are handled by the common worker. */
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1781
1782
/**
 * @opcode 0x60
 */
FNIEMOP_DEF(iemOp_pusha)
{
    /* PUSHA/PUSHAD - 80186+, invalid in 64-bit mode; dispatch to the
       operand-size specific C implementation. */
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1796
1797
/**
 * @opcode 0x61
 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* Outside 64-bit mode this is POPA/POPAD (80186+). */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    /* In 64-bit mode 0x61 is the (Knights Corner) MVEX prefix, which this
       implementation does not support - raise #UD. */
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1817
1818
1819/**
1820 * @opcode 0x62
1821 * @opmnemonic bound
1822 * @op1 Gv
1823 * @op2 Ma
1824 * @opmincpu 80186
1825 * @ophints harmless invalid_64
1826 * @optest op1=0 op2=0 ->
1827 * @optest op1=1 op2=0 -> value.xcpt=5
1828 * @optest o16 / op1=0xffff op2=0x0000fffe ->
1829 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
1830 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
1831 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
1832 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
1833 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
1834 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
1835 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
1836 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
1837 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
1838 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
1839 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
1840 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
1841 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
1842 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
1843 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
1844 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
1845 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
1846 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
1847 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
1848 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
1849 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
1850 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
1851 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
1852 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
1853 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
1854 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
1855 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
1856 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
1857 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
1858 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
1859 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
1860 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
1861 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
1862 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
1863 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
1864 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
1865 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
1866 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
1867 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
1868 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
1869 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
1870 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            /* MOD != 3: this really is BOUND; both paths end in a CIMPL call
               which performs the range check and returns. */
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at m16, upper bound at m16+2. */
                IEM_MC_FETCH_GREG_U16(u16Index, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at m32, upper bound at m32+4. */
                IEM_MC_FETCH_GREG_U32(u32Index, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD == 3: only valid as the start of an EVEX prefix (AVX-512). */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
    {
        /* 64-bit mode: 0x62 is only the EVEX prefix; BOUND does not exist. */
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX decoding: consume the remaining two payload bytes, then give up -
       EVEX-encoded instructions are not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1958
1959
/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    /* ARPL - adjust RPL of Ew to be >= RPL of Gw; 286+, protected mode only. */
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory - the destination word is mapped read-write and committed
           after the worker has run. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
2009
2010
/**
 * @opcode 0x63
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases.  */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register: sign-extend the 32-bit source register into
         * the 64-bit destination.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory: sign-extend the 32-bit
         * memory operand into the 64-bit destination register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2054
2055
/**
 * @opcode 0x64
 * @opmnemonic segfs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    /* FS segment-override prefix: record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2073
2074
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    /* GS segment-override prefix: record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2092
2093
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_op_size)
{
    /* Operand-size override prefix: flip the effective operand size and
       decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present (they have already claimed idxPrefix). */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2118
2119
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    /* Address-size override prefix: toggle the effective address mode
       relative to the default one, then decode the next opcode byte.
       Note that in 64-bit mode it selects 32-bit addressing (no 16-bit). */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2145
2146
/**
 * @opcode 0x68
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    /* PUSH imm16/imm32 (80186+); in 64-bit mode the immediate is a 32-bit
       value sign-extended to 64 bits. */
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Sign-extended 32-bit immediate. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2193
2194
2195/**
2196 * @opcode 0x69
2197 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    /*
     * Three-operand signed multiply: Gv = Ev * Iz.  The source is either a
     * general register or memory (per ModR/M); the full-width immediate
     * follows the ModR/M/displacement bytes.  In 64-bit mode the immediate
     * is a dword sign-extended to 64 bits.  The product is computed in a
     * local temporary and stored to the destination register afterwards.
     */
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Last argument accounts for the 2 immediate bytes still to come. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Last argument accounts for the 4 immediate bytes still to come. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                /* Immediate is a dword, sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Last argument accounts for the 4 immediate bytes still to come. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
2356
2357
2358/**
2359 * @opcode 0x6a
2360 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    /* push Ib: push a sign-extended byte immediate; the int8_t local is
       implicitly sign-extended by the U16/U32/U64 push below. */
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2386
2387
2388/**
2389 * @opcode 0x6b
2390 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    /*
     * Three-operand signed multiply with byte immediate: Gv = Ev * Ib.
     * Same structure as the Iz form, but the single immediate byte is
     * sign-extended to the effective operand size before the multiply.
     */
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extended */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Last argument accounts for the 1 immediate byte still to come. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extended */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Last argument accounts for the 1 immediate byte still to come. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extended */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Last argument accounts for the 1 immediate byte still to come. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
2543
2544
2545/**
2546 * @opcode 0x6c
2547 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    /* ins Yb,DX: byte string input from port DX.  Deferred to C helpers
       keyed on addressing mode; a REP/REPNE prefix selects the repeating
       variants.  NOTE(review): the trailing 'false' argument presumably
       means "I/O permission not yet checked" - confirm against the
       iemCImpl_ins_* implementations. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2575
2576
2577/**
2578 * @opcode 0x6d
2579 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    /* ins Yv,DX: word/dword string input from port DX, dispatched on both
       effective operand size and addressing mode.  A 64-bit operand size
       falls back to the 32-bit helpers (no 64-bit port I/O). */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2639
2640
2641/**
2642 * @opcode 0x6e
2643 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    /* outs DX,Yb: byte string output to port DX.  The source segment may be
       overridden, so iEffSeg is passed through to the C helper alongside
       the 'false' flag (presumably fIoChecked - confirm in iemCImpl_outs_*). */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2671
2672
2673/**
2674 * @opcode 0x6f
2675 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    /* outs DX,Yv: word/dword string output to port DX, dispatched on both
       effective operand size and addressing mode.  A 64-bit operand size
       falls back to the 32-bit helpers (no 64-bit port I/O).  The effective
       source segment is forwarded since it honours segment overrides. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2735
2736
2737/**
2738 * @opcode 0x70
2739 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Take the rel8 branch when OF=1, otherwise fall through. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2756
2757
2758/**
2759 * @opcode 0x71
2760 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Take the rel8 branch when OF=0 (condition inverted: fall thru on OF=1). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2777
2778/**
2779 * @opcode 0x72
2780 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Take the rel8 branch when CF=1, otherwise fall through. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2797
2798
2799/**
2800 * @opcode 0x73
2801 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Take the rel8 branch when CF=0 (condition inverted: fall thru on CF=1). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2818
2819
2820/**
2821 * @opcode 0x74
2822 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Take the rel8 branch when ZF=1, otherwise fall through. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2839
2840
2841/**
2842 * @opcode 0x75
2843 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Take the rel8 branch when ZF=0 (condition inverted: fall thru on ZF=1). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2860
2861
2862/**
2863 * @opcode 0x76
2864 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Take the rel8 branch when CF=1 or ZF=1 (unsigned below-or-equal). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2881
2882
2883/**
2884 * @opcode 0x77
2885 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Take the rel8 branch when CF=0 and ZF=0 (unsigned above; inverted test). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2902
2903
2904/**
2905 * @opcode 0x78
2906 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Take the rel8 branch when SF=1, otherwise fall through. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2923
2924
2925/**
2926 * @opcode 0x79
2927 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Take the rel8 branch when SF=0 (condition inverted: fall thru on SF=1). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2944
2945
2946/**
2947 * @opcode 0x7a
2948 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Take the rel8 branch when PF=1, otherwise fall through. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2965
2966
2967/**
2968 * @opcode 0x7b
2969 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Take the rel8 branch when PF=0 (condition inverted: fall thru on PF=1). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2986
2987
2988/**
2989 * @opcode 0x7c
2990 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Take the rel8 branch when SF != OF (signed less-than). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3007
3008
3009/**
3010 * @opcode 0x7d
3011 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Take the rel8 branch when SF == OF (signed greater-or-equal; inverted test). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3028
3029
3030/**
3031 * @opcode 0x7e
3032 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Take the rel8 branch when ZF=1 or SF != OF (signed less-or-equal). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3049
3050
3051/**
3052 * @opcode 0x7f
3053 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Take the rel8 branch when ZF=0 and SF == OF (signed greater; inverted test). */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3070
3071
3072/**
3073 * @opcode 0x80
3074 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /*
     * Group 1 byte ops with byte immediate: add/or/adc/sbb/and/sub/xor/cmp
     * Eb,Ib, selected by the ModR/M reg field via the g_apIemImplGrp1 table.
     * For memory targets a LOCK prefix is honoured via pfnLockedU8; CMP has
     * no locked variant (pfnLockedU8 is NULL) and maps read-only.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Last argument accounts for the 1 immediate byte still to come. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3143
3144
3145/**
3146 * @opcode 0x81
3147 */
3148FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
3149{
3150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3151 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3152 {
3153 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
3154 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
3155 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
3156 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
3157 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
3158 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
3159 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
3160 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
3161 }
3162 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
3163
3164 switch (pVCpu->iem.s.enmEffOpSize)
3165 {
3166 case IEMMODE_16BIT:
3167 {
3168 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3169 {
3170 /* register target */
3171 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3173 IEM_MC_BEGIN(3, 0);
3174 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3175 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
3176 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3177
3178 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3179 IEM_MC_REF_EFLAGS(pEFlags);
3180 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3181
3182 IEM_MC_ADVANCE_RIP();
3183 IEM_MC_END();
3184 }
3185 else
3186 {
3187 /* memory target */
3188 uint32_t fAccess;
3189 if (pImpl->pfnLockedU16)
3190 fAccess = IEM_ACCESS_DATA_RW;
3191 else /* CMP, TEST */
3192 fAccess = IEM_ACCESS_DATA_R;
3193 IEM_MC_BEGIN(3, 2);
3194 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3195 IEM_MC_ARG(uint16_t, u16Src, 1);
3196 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3197 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3198
3199 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
3200 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3201 IEM_MC_ASSIGN(u16Src, u16Imm);
3202 if (pImpl->pfnLockedU16)
3203 IEMOP_HLP_DONE_DECODING();
3204 else
3205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3206 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3207 IEM_MC_FETCH_EFLAGS(EFlags);
3208 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3209 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3210 else
3211 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3212
3213 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3214 IEM_MC_COMMIT_EFLAGS(EFlags);
3215 IEM_MC_ADVANCE_RIP();
3216 IEM_MC_END();
3217 }
3218 break;
3219 }
3220
3221 case IEMMODE_32BIT:
3222 {
3223 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3224 {
3225 /* register target */
3226 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3228 IEM_MC_BEGIN(3, 0);
3229 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3230 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
3231 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3232
3233 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3234 IEM_MC_REF_EFLAGS(pEFlags);
3235 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3236 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3237
3238 IEM_MC_ADVANCE_RIP();
3239 IEM_MC_END();
3240 }
3241 else
3242 {
3243 /* memory target */
3244 uint32_t fAccess;
3245 if (pImpl->pfnLockedU32)
3246 fAccess = IEM_ACCESS_DATA_RW;
3247 else /* CMP, TEST */
3248 fAccess = IEM_ACCESS_DATA_R;
3249 IEM_MC_BEGIN(3, 2);
3250 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3251 IEM_MC_ARG(uint32_t, u32Src, 1);
3252 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3253 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3254
3255 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3256 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3257 IEM_MC_ASSIGN(u32Src, u32Imm);
3258 if (pImpl->pfnLockedU32)
3259 IEMOP_HLP_DONE_DECODING();
3260 else
3261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3262 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3263 IEM_MC_FETCH_EFLAGS(EFlags);
3264 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3265 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3266 else
3267 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3268
3269 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3270 IEM_MC_COMMIT_EFLAGS(EFlags);
3271 IEM_MC_ADVANCE_RIP();
3272 IEM_MC_END();
3273 }
3274 break;
3275 }
3276
3277 case IEMMODE_64BIT:
3278 {
3279 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3280 {
3281 /* register target */
3282 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3284 IEM_MC_BEGIN(3, 0);
3285 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3286 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
3287 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3288
3289 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3290 IEM_MC_REF_EFLAGS(pEFlags);
3291 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3292
3293 IEM_MC_ADVANCE_RIP();
3294 IEM_MC_END();
3295 }
3296 else
3297 {
3298 /* memory target */
3299 uint32_t fAccess;
3300 if (pImpl->pfnLockedU64)
3301 fAccess = IEM_ACCESS_DATA_RW;
3302 else /* CMP */
3303 fAccess = IEM_ACCESS_DATA_R;
3304 IEM_MC_BEGIN(3, 2);
3305 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3306 IEM_MC_ARG(uint64_t, u64Src, 1);
3307 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3308 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3309
3310 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3311 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3312 if (pImpl->pfnLockedU64)
3313 IEMOP_HLP_DONE_DECODING();
3314 else
3315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3316 IEM_MC_ASSIGN(u64Src, u64Imm);
3317 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3318 IEM_MC_FETCH_EFLAGS(EFlags);
3319 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3320 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3321 else
3322 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3323
3324 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3325 IEM_MC_COMMIT_EFLAGS(EFlags);
3326 IEM_MC_ADVANCE_RIP();
3327 IEM_MC_END();
3328 }
3329 break;
3330 }
3331 }
3332 return VINF_SUCCESS;
3333}
3334
3335
/**
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup og_groups
 *
 * Alias of opcode 0x80 (group 1 Eb,Ib); only valid outside 64-bit mode,
 * where it forwards straight to the 0x80 handler.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
3346
3347
3348/**
3349 * @opcode 0x83
3350 */
3351FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
3352{
3353 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3354 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3355 {
3356 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
3357 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
3358 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
3359 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
3360 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
3361 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
3362 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
3363 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
3364 }
3365 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
3366 to the 386 even if absent in the intel reference manuals and some
3367 3rd party opcode listings. */
3368 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
3369
3370 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3371 {
3372 /*
3373 * Register target
3374 */
3375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3376 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3377 switch (pVCpu->iem.s.enmEffOpSize)
3378 {
3379 case IEMMODE_16BIT:
3380 {
3381 IEM_MC_BEGIN(3, 0);
3382 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3383 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
3384 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3385
3386 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3387 IEM_MC_REF_EFLAGS(pEFlags);
3388 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3389
3390 IEM_MC_ADVANCE_RIP();
3391 IEM_MC_END();
3392 break;
3393 }
3394
3395 case IEMMODE_32BIT:
3396 {
3397 IEM_MC_BEGIN(3, 0);
3398 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3399 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
3400 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3401
3402 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3403 IEM_MC_REF_EFLAGS(pEFlags);
3404 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3405 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3406
3407 IEM_MC_ADVANCE_RIP();
3408 IEM_MC_END();
3409 break;
3410 }
3411
3412 case IEMMODE_64BIT:
3413 {
3414 IEM_MC_BEGIN(3, 0);
3415 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3416 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
3417 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3418
3419 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3420 IEM_MC_REF_EFLAGS(pEFlags);
3421 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3422
3423 IEM_MC_ADVANCE_RIP();
3424 IEM_MC_END();
3425 break;
3426 }
3427 }
3428 }
3429 else
3430 {
3431 /*
3432 * Memory target.
3433 */
3434 uint32_t fAccess;
3435 if (pImpl->pfnLockedU16)
3436 fAccess = IEM_ACCESS_DATA_RW;
3437 else /* CMP */
3438 fAccess = IEM_ACCESS_DATA_R;
3439
3440 switch (pVCpu->iem.s.enmEffOpSize)
3441 {
3442 case IEMMODE_16BIT:
3443 {
3444 IEM_MC_BEGIN(3, 2);
3445 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3446 IEM_MC_ARG(uint16_t, u16Src, 1);
3447 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3448 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3449
3450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3451 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3452 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
3453 if (pImpl->pfnLockedU16)
3454 IEMOP_HLP_DONE_DECODING();
3455 else
3456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3457 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3458 IEM_MC_FETCH_EFLAGS(EFlags);
3459 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3460 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3461 else
3462 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3463
3464 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3465 IEM_MC_COMMIT_EFLAGS(EFlags);
3466 IEM_MC_ADVANCE_RIP();
3467 IEM_MC_END();
3468 break;
3469 }
3470
3471 case IEMMODE_32BIT:
3472 {
3473 IEM_MC_BEGIN(3, 2);
3474 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3475 IEM_MC_ARG(uint32_t, u32Src, 1);
3476 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3478
3479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3480 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3481 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
3482 if (pImpl->pfnLockedU32)
3483 IEMOP_HLP_DONE_DECODING();
3484 else
3485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3486 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3487 IEM_MC_FETCH_EFLAGS(EFlags);
3488 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3489 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3490 else
3491 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3492
3493 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3494 IEM_MC_COMMIT_EFLAGS(EFlags);
3495 IEM_MC_ADVANCE_RIP();
3496 IEM_MC_END();
3497 break;
3498 }
3499
3500 case IEMMODE_64BIT:
3501 {
3502 IEM_MC_BEGIN(3, 2);
3503 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3504 IEM_MC_ARG(uint64_t, u64Src, 1);
3505 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3506 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3507
3508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3509 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3510 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
3511 if (pImpl->pfnLockedU64)
3512 IEMOP_HLP_DONE_DECODING();
3513 else
3514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3515 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3516 IEM_MC_FETCH_EFLAGS(EFlags);
3517 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3518 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3519 else
3520 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3521
3522 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3523 IEM_MC_COMMIT_EFLAGS(EFlags);
3524 IEM_MC_ADVANCE_RIP();
3525 IEM_MC_END();
3526 break;
3527 }
3528 }
3529 }
3530 return VINF_SUCCESS;
3531}
3532
3533
/**
 * @opcode 0x84
 *
 * TEST Eb,Gb - logical compare of the two byte operands; only EFLAGS are
 * written (AF is declared undefined for the verifier).
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
3543
3544
/**
 * @opcode 0x85
 *
 * TEST Ev,Gv - logical compare at the effective operand size; only EFLAGS
 * are written (AF is declared undefined for the verifier).
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
3554
3555
/**
 * @opcode 0x86
 *
 * XCHG Eb,Gb - swap the two byte operands.  No EFLAGS access occurs in this
 * block.  The memory form is handled through a mapped read-write access and
 * an assembly worker (XCHG with memory is implicitly atomic on real CPUs).
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Fetch both byte registers, then store them crosswise. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3605
3606
/**
 * @opcode 0x87
 *
 * XCHG Ev,Gv - swap the two operands at the effective operand size.  No
 * EFLAGS access occurs in this block.  The 32-bit memory form explicitly
 * clears the high half of the 64-bit register afterwards.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                /* Fetch both registers, then store them crosswise. */
                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* 32-bit writes zero the upper dword of the full register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3730
3731
/**
 * @opcode 0x88
 *
 * MOV Eb,Gb - store a byte register to a register or memory destination.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* Effective address must be calculated before decoding is declared done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
3773
3774
/**
 * @opcode 0x89
 *
 * MOV Ev,Gv - store a general register to a register or memory destination
 * at the effective operand size (16/32/64-bit).
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3866
3867
/**
 * @opcode 0x8a
 *
 * MOV Gb,Eb - load a byte register from a register or memory source.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3907
3908
/**
 * @opcode 0x8b
 *
 * MOV Gv,Ev - load a general register from a register or memory source at
 * the effective operand size (16/32/64-bit).
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
4000
4001
/**
 * opcode 0x63
 * @todo Table fixme
 *
 * Mode-dependent dispatcher: outside 64-bit mode this byte is ARPL Ew,Gw;
 * in 64-bit mode it is MOVSXD.  With a non-64-bit effective operand size
 * in 64-bit mode it is routed to the plain MOV Gv,Ev handler.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
4014
4015
/**
 * @opcode 0x8c
 *
 * MOV Ev,Sw - store a segment register into a general register or a
 * word-sized memory location.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the source segment register exists.  The REX.R prefix is
     * ignored for the reg field here.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if ( iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero-extended to 32 bits */
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero-extended to 64 bits */
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4091
4092
4093
4094
/**
 * @opcode 0x8d
 *
 * LEA Gv,M - store the effective address of the memory operand in a general
 * register.  The register form raises \#UD; for 16/32-bit operand sizes the
 * address is truncated to the operand width before storing.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate address to 16 bits */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate address to 32 bits */
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7); /* all operand sizes handled above */
}
4143
4144
/**
 * @opcode 0x8e
 *
 * MOV Sw,Ev - load a segment register from a general register or a
 * word-sized memory location.  CS is not a valid destination (\#UD); the
 * actual segment load is performed by the iemCImpl_load_SReg C worker
 * (which presumably also advances RIP - no IEM_MC_ADVANCE_RIP here).
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if ( iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4201
4202
4203/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    PCPUMCTX        pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    /* The last argument is the rSP bias (operand size in bytes), emulating the
       "RSP incremented first" rule for the effective address calculation. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop into a shadow RSP so nothing is committed unless the store succeeds. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Only commit RSP and advance RIP when both the pop and the store worked. */
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4297
4298
4299/**
4300 * @opcode 0x8f
4301 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* XOP may not be combined with legacy size/rep/lock or REX prefixes. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if (bXop2 & 0x80 /* XOP.W */)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* Note: the R, X, B and vvvv fields are stored inverted in the
               encoding, hence the ~ before extracting them below. */
            pVCpu->iem.s.uRexReg    = ~bRm >> (7 - 3);
            pVCpu->iem.s.uRexIndex  = ~bRm >> (6 - 3);
            pVCpu->iem.s.uRexB      = ~bRm >> (5 - 3);
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f)    /* mmmmm selects the opcode map */
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
4361
4362
4363/**
4364 * Common 'xchg reg,rAX' helper.
4365 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    iReg |= pVCpu->iem.s.uRexB;     /* apply REX.B to get the full register index */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg,         u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg,         u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg,         u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4412
4413
4414/**
4415 * @opcode 0x90
4416 */
4417FNIEMOP_DEF(iemOp_nop)
4418{
4419 /* R8/R8D and RAX/EAX can be exchanged. */
4420 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4421 {
4422 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4423 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4424 }
4425
4426 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4427 IEMOP_MNEMONIC(pause, "pause");
4428 else
4429 IEMOP_MNEMONIC(nop, "nop");
4430 IEM_MC_BEGIN(0, 0);
4431 IEM_MC_ADVANCE_RIP();
4432 IEM_MC_END();
4433 return VINF_SUCCESS;
4434}
4435
4436
4437/**
4438 * @opcode 0x91
4439 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    /* Exchanges rCX (r9 with REX.B) and rAX via the common helper. */
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
4445
4446
4447/**
4448 * @opcode 0x92
4449 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    /* Exchanges rDX (r10 with REX.B) and rAX via the common helper. */
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
4455
4456
4457/**
4458 * @opcode 0x93
4459 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    /* Exchanges rBX (r11 with REX.B) and rAX via the common helper. */
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
4465
4466
4467/**
4468 * @opcode 0x94
4469 */
4470FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4471{
4472 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4473 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4474}
4475
4476
4477/**
4478 * @opcode 0x95
4479 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    /* Exchanges rBP (r13 with REX.B) and rAX via the common helper. */
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
4485
4486
4487/**
4488 * @opcode 0x96
4489 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    /* Exchanges rSI (r14 with REX.B) and rAX via the common helper. */
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
4495
4496
4497/**
4498 * @opcode 0x97
4499 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    /* Exchanges rDI (r15 with REX.B) and rAX via the common helper. */
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
4505
4506
4507/**
4508 * @opcode 0x98
4509 */
FNIEMOP_DEF(iemOp_cbw)
{
    /* Sign-extends AL->AX, AX->EAX or EAX->RAX depending on operand size, by
       testing the source's sign bit and OR-ing/AND-ing in the extension. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {       /* AL sign bit */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {      /* AX sign bit */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {      /* EAX sign bit */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4554
4555
4556/**
4557 * @opcode 0x99
4558 */
FNIEMOP_DEF(iemOp_cwd)
{
    /* Sign-extends rAX into rDX:rAX: rDX becomes all ones or all zeros
       depending on the source operand's sign bit (cwd/cdq/cqo). */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {      /* AX sign bit */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {      /* EAX sign bit */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {      /* RAX sign bit */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4603
4604
4605/**
4606 * @opcode 0x9a
4607 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();   /* direct far call is invalid in 64-bit mode */

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);       /* 32-bit offset */
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg); /* 16-bit offset, zero extended */
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
4623
4624
4625/** Opcode 0x9b. (aka fwait) */
FNIEMOP_DEF(iemOp_wait)
{
    /* wait/fwait: only checks for pending FPU exceptions / device-not-available
       conditions; otherwise a no-op that just advances RIP. */
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4638
4639
4640/**
4641 * @opcode 0x9c
4642 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    /* pushf: flag handling (VM/IOPL checks etc.) is deferred to the C implementation. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
4649
4650
4651/**
4652 * @opcode 0x9d
4653 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    /* popf: flag handling (VM/IOPL checks etc.) is deferred to the C implementation. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
4660
4661
4662/**
4663 * @opcode 0x9e
4664 */
FNIEMOP_DEF(iemOp_sahf)
{
    /* Stores AH into the low byte of EFLAGS (SF,ZF,AF,PF,CF); bit 1 is forced
       to one.  In 64-bit mode the instruction requires the LAHF/SAHF CPUID bit. */
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));  /* keep the upper 24 bits */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);            /* reserved bit 1 is always set */
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4686
4687
4688/**
4689 * @opcode 0x9f
4690 */
FNIEMOP_DEF(iemOp_lahf)
{
    /* Loads the low byte of EFLAGS into AH.  In 64-bit mode the instruction
       requires the LAHF/SAHF CPUID bit. */
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4706
4707
4708/**
4709 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
4710 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
4711 * prefixes. Will return on failures.
4712 * @param a_GCPtrMemOff The variable to store the offset in.
4713 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        /* The moffs width follows the effective ADDRESS mode, not the operand size. */ \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
4732
4733/**
4734 * @opcode 0xa0
4735 */
4736FNIEMOP_DEF(iemOp_mov_AL_Ob)
4737{
4738 /*
4739 * Get the offset and fend of lock prefixes.
4740 */
4741 RTGCPTR GCPtrMemOff;
4742 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4743
4744 /*
4745 * Fetch AL.
4746 */
4747 IEM_MC_BEGIN(0,1);
4748 IEM_MC_LOCAL(uint8_t, u8Tmp);
4749 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4750 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
4751 IEM_MC_ADVANCE_RIP();
4752 IEM_MC_END();
4753 return VINF_SUCCESS;
4754}
4755
4756
4757/**
4758 * @opcode 0xa1
4759 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);  /* implicitly zero-extends to RAX */
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4804
4805
4806/**
4807 * @opcode 0xa2
4808 */
4809FNIEMOP_DEF(iemOp_mov_Ob_AL)
4810{
4811 /*
4812 * Get the offset and fend of lock prefixes.
4813 */
4814 RTGCPTR GCPtrMemOff;
4815 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4816
4817 /*
4818 * Store AL.
4819 */
4820 IEM_MC_BEGIN(0,1);
4821 IEM_MC_LOCAL(uint8_t, u8Tmp);
4822 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4823 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4824 IEM_MC_ADVANCE_RIP();
4825 IEM_MC_END();
4826 return VINF_SUCCESS;
4827}
4828
4829
4830/**
4831 * @opcode 0xa3
4832 */
4833FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4834{
4835 /*
4836 * Get the offset and fend of lock prefixes.
4837 */
4838 RTGCPTR GCPtrMemOff;
4839 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4840
4841 /*
4842 * Store rAX.
4843 */
4844 switch (pVCpu->iem.s.enmEffOpSize)
4845 {
4846 case IEMMODE_16BIT:
4847 IEM_MC_BEGIN(0,1);
4848 IEM_MC_LOCAL(uint16_t, u16Tmp);
4849 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4850 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4851 IEM_MC_ADVANCE_RIP();
4852 IEM_MC_END();
4853 return VINF_SUCCESS;
4854
4855 case IEMMODE_32BIT:
4856 IEM_MC_BEGIN(0,1);
4857 IEM_MC_LOCAL(uint32_t, u32Tmp);
4858 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4859 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4860 IEM_MC_ADVANCE_RIP();
4861 IEM_MC_END();
4862 return VINF_SUCCESS;
4863
4864 case IEMMODE_64BIT:
4865 IEM_MC_BEGIN(0,1);
4866 IEM_MC_LOCAL(uint64_t, u64Tmp);
4867 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4868 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4869 IEM_MC_ADVANCE_RIP();
4870 IEM_MC_END();
4871 return VINF_SUCCESS;
4872
4873 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4874 }
4875}
4876
4877/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    /* Load from [seg:xSI], store to [ES:xDI]. */ \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    /* Step xSI/xDI backwards when the direction flag is set, forwards otherwise. */ \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
4895
4896/**
4897 * @opcode 0xa4
4898 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    /* REP and REPNE are treated the same here (REPNE has no meaning for movs). */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4931
4932
4933/**
4934 * @opcode 0xa5
4935 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        /* Note: the missing break after the 32-bit case is harmless - every
           inner case returns, so control never falls through. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 64-bit op + 16-bit addr cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5017
5018#undef IEM_MOVS_CASE
5019
5020/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    \
    /* Fetch [seg:xSI] and [ES:xDI], then compare them like CMP (flags only). */ \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    /* Step xSI/xDI backwards when the direction flag is set, forwards otherwise. */ \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
5046
5047/**
5048 * @opcode 0xa6
5049 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    /* Unlike movs/stos, REPE and REPNE differ here (loop-while-equal vs not-equal). */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
5094
5095
5096/**
5097 * @opcode 0xa7
5098 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    /* Unlike movs/stos, REPE and REPNE differ here (loop-while-equal vs not-equal). */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        /* Note: the missing break after the 32-bit case is harmless - every
           inner case returns, so control never falls through. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 64-bit op + 16-bit addr cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 64-bit op + 16-bit addr cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
5216
5217#undef IEM_CMPS_CASE
5218
5219/**
5220 * @opcode 0xa8
5221 */
5222FNIEMOP_DEF(iemOp_test_AL_Ib)
5223{
5224 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
5225 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5226 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
5227}
5228
5229
5230/**
5231 * @opcode 0xa9
5232 */
5233FNIEMOP_DEF(iemOp_test_eAX_Iz)
5234{
5235 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
5236 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5237 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
5238}
5239
5240
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits one non-REP STOS iteration for the given value/address widths:
 * fetches the low ValBits of xAX, stores them to ES:xDI, then steps xDI by
 * ValBits/8 bytes - downwards when EFLAGS.DF is set, upwards otherwise. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
5256
5257/**
5258 * @opcode 0xaa
5259 */
5260FNIEMOP_DEF(iemOp_stosb_Yb_AL)
5261{
5262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5263
5264 /*
5265 * Use the C implementation if a repeat prefix is encountered.
5266 */
5267 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5268 {
5269 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
5270 switch (pVCpu->iem.s.enmEffAddrMode)
5271 {
5272 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
5273 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
5274 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
5275 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5276 }
5277 }
5278 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
5279
5280 /*
5281 * Sharing case implementation with stos[wdq] below.
5282 */
5283 switch (pVCpu->iem.s.enmEffAddrMode)
5284 {
5285 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
5286 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
5287 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
5288 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5289 }
5290 return VINF_SUCCESS;
5291}
5292
5293
5294/**
5295 * @opcode 0xab
5296 */
5297FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
5298{
5299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5300
5301 /*
5302 * Use the C implementation if a repeat prefix is encountered.
5303 */
5304 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5305 {
5306 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
5307 switch (pVCpu->iem.s.enmEffOpSize)
5308 {
5309 case IEMMODE_16BIT:
5310 switch (pVCpu->iem.s.enmEffAddrMode)
5311 {
5312 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
5313 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
5314 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
5315 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5316 }
5317 break;
5318 case IEMMODE_32BIT:
5319 switch (pVCpu->iem.s.enmEffAddrMode)
5320 {
5321 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
5322 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
5323 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
5324 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5325 }
5326 case IEMMODE_64BIT:
5327 switch (pVCpu->iem.s.enmEffAddrMode)
5328 {
5329 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
5330 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
5331 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
5332 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5333 }
5334 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5335 }
5336 }
5337 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
5338
5339 /*
5340 * Annoying double switch here.
5341 * Using ugly macro for implementing the cases, sharing it with stosb.
5342 */
5343 switch (pVCpu->iem.s.enmEffOpSize)
5344 {
5345 case IEMMODE_16BIT:
5346 switch (pVCpu->iem.s.enmEffAddrMode)
5347 {
5348 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
5349 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
5350 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
5351 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5352 }
5353 break;
5354
5355 case IEMMODE_32BIT:
5356 switch (pVCpu->iem.s.enmEffAddrMode)
5357 {
5358 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
5359 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
5360 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
5361 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5362 }
5363 break;
5364
5365 case IEMMODE_64BIT:
5366 switch (pVCpu->iem.s.enmEffAddrMode)
5367 {
5368 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5369 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
5370 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
5371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5372 }
5373 break;
5374 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5375 }
5376 return VINF_SUCCESS;
5377}
5378
5379#undef IEM_STOS_CASE
5380
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits one non-REP LODS iteration for the given value/address widths:
 * loads ValBits from iEffSeg:xSI into the low ValBits of xAX, then steps
 * xSI by ValBits/8 bytes - downwards when EFLAGS.DF is set, upwards
 * otherwise. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
5396
5397/**
5398 * @opcode 0xac
5399 */
5400FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
5401{
5402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5403
5404 /*
5405 * Use the C implementation if a repeat prefix is encountered.
5406 */
5407 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5408 {
5409 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
5410 switch (pVCpu->iem.s.enmEffAddrMode)
5411 {
5412 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
5413 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
5414 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
5415 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5416 }
5417 }
5418 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
5419
5420 /*
5421 * Sharing case implementation with stos[wdq] below.
5422 */
5423 switch (pVCpu->iem.s.enmEffAddrMode)
5424 {
5425 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
5426 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
5427 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
5428 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5429 }
5430 return VINF_SUCCESS;
5431}
5432
5433
5434/**
5435 * @opcode 0xad
5436 */
5437FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
5438{
5439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5440
5441 /*
5442 * Use the C implementation if a repeat prefix is encountered.
5443 */
5444 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5445 {
5446 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
5447 switch (pVCpu->iem.s.enmEffOpSize)
5448 {
5449 case IEMMODE_16BIT:
5450 switch (pVCpu->iem.s.enmEffAddrMode)
5451 {
5452 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
5453 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
5454 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
5455 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5456 }
5457 break;
5458 case IEMMODE_32BIT:
5459 switch (pVCpu->iem.s.enmEffAddrMode)
5460 {
5461 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
5462 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
5463 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
5464 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5465 }
5466 case IEMMODE_64BIT:
5467 switch (pVCpu->iem.s.enmEffAddrMode)
5468 {
5469 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
5470 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
5471 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
5472 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5473 }
5474 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5475 }
5476 }
5477 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
5478
5479 /*
5480 * Annoying double switch here.
5481 * Using ugly macro for implementing the cases, sharing it with lodsb.
5482 */
5483 switch (pVCpu->iem.s.enmEffOpSize)
5484 {
5485 case IEMMODE_16BIT:
5486 switch (pVCpu->iem.s.enmEffAddrMode)
5487 {
5488 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
5489 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
5490 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
5491 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5492 }
5493 break;
5494
5495 case IEMMODE_32BIT:
5496 switch (pVCpu->iem.s.enmEffAddrMode)
5497 {
5498 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
5499 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
5500 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
5501 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5502 }
5503 break;
5504
5505 case IEMMODE_64BIT:
5506 switch (pVCpu->iem.s.enmEffAddrMode)
5507 {
5508 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5509 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
5510 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
5511 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5512 }
5513 break;
5514 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5515 }
5516 return VINF_SUCCESS;
5517}
5518
5519#undef IEM_LODS_CASE
5520
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits one non-REP SCAS iteration for the given value/address widths:
 * loads ValBits from ES:xDI, compares it against the low ValBits of xAX via
 * iemAImpl_cmp_u* (which updates EFLAGS), then steps xDI by ValBits/8 bytes
 * - downwards when EFLAGS.DF is set, upwards otherwise. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
5542
5543/**
5544 * @opcode 0xae
5545 */
5546FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5547{
5548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5549
5550 /*
5551 * Use the C implementation if a repeat prefix is encountered.
5552 */
5553 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5554 {
5555 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5556 switch (pVCpu->iem.s.enmEffAddrMode)
5557 {
5558 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5559 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5560 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5561 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5562 }
5563 }
5564 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5565 {
5566 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5567 switch (pVCpu->iem.s.enmEffAddrMode)
5568 {
5569 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5570 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5571 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5572 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5573 }
5574 }
5575 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5576
5577 /*
5578 * Sharing case implementation with stos[wdq] below.
5579 */
5580 switch (pVCpu->iem.s.enmEffAddrMode)
5581 {
5582 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5583 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5584 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5585 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5586 }
5587 return VINF_SUCCESS;
5588}
5589
5590
5591/**
5592 * @opcode 0xaf
5593 */
5594FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5595{
5596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5597
5598 /*
5599 * Use the C implementation if a repeat prefix is encountered.
5600 */
5601 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5602 {
5603 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5604 switch (pVCpu->iem.s.enmEffOpSize)
5605 {
5606 case IEMMODE_16BIT:
5607 switch (pVCpu->iem.s.enmEffAddrMode)
5608 {
5609 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5610 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5611 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5612 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5613 }
5614 break;
5615 case IEMMODE_32BIT:
5616 switch (pVCpu->iem.s.enmEffAddrMode)
5617 {
5618 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5619 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5620 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5621 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5622 }
5623 case IEMMODE_64BIT:
5624 switch (pVCpu->iem.s.enmEffAddrMode)
5625 {
5626 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
5627 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5628 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5629 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5630 }
5631 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5632 }
5633 }
5634 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5635 {
5636 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5637 switch (pVCpu->iem.s.enmEffOpSize)
5638 {
5639 case IEMMODE_16BIT:
5640 switch (pVCpu->iem.s.enmEffAddrMode)
5641 {
5642 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5643 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5644 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5645 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5646 }
5647 break;
5648 case IEMMODE_32BIT:
5649 switch (pVCpu->iem.s.enmEffAddrMode)
5650 {
5651 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5652 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5653 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5654 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5655 }
5656 case IEMMODE_64BIT:
5657 switch (pVCpu->iem.s.enmEffAddrMode)
5658 {
5659 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5660 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5661 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5663 }
5664 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5665 }
5666 }
5667 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5668
5669 /*
5670 * Annoying double switch here.
5671 * Using ugly macro for implementing the cases, sharing it with scasb.
5672 */
5673 switch (pVCpu->iem.s.enmEffOpSize)
5674 {
5675 case IEMMODE_16BIT:
5676 switch (pVCpu->iem.s.enmEffAddrMode)
5677 {
5678 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5679 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5680 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5681 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5682 }
5683 break;
5684
5685 case IEMMODE_32BIT:
5686 switch (pVCpu->iem.s.enmEffAddrMode)
5687 {
5688 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5689 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5690 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5691 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5692 }
5693 break;
5694
5695 case IEMMODE_64BIT:
5696 switch (pVCpu->iem.s.enmEffAddrMode)
5697 {
5698 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5699 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5700 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5701 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5702 }
5703 break;
5704 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5705 }
5706 return VINF_SUCCESS;
5707}
5708
5709#undef IEM_SCAS_CASE
5710
5711/**
5712 * Common 'mov r8, imm8' helper.
5713 */
5714FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
5715{
5716 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
5717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5718
5719 IEM_MC_BEGIN(0, 1);
5720 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
5721 IEM_MC_STORE_GREG_U8(iReg, u8Value);
5722 IEM_MC_ADVANCE_RIP();
5723 IEM_MC_END();
5724
5725 return VINF_SUCCESS;
5726}
5727
5728
5729/**
5730 * @opcode 0xb0
5731 */
5732FNIEMOP_DEF(iemOp_mov_AL_Ib)
5733{
5734 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
5735 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5736}
5737
5738
5739/**
5740 * @opcode 0xb1
5741 */
5742FNIEMOP_DEF(iemOp_CL_Ib)
5743{
5744 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
5745 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5746}
5747
5748
5749/**
5750 * @opcode 0xb2
5751 */
5752FNIEMOP_DEF(iemOp_DL_Ib)
5753{
5754 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
5755 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5756}
5757
5758
5759/**
5760 * @opcode 0xb3
5761 */
5762FNIEMOP_DEF(iemOp_BL_Ib)
5763{
5764 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
5765 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5766}
5767
5768
5769/**
5770 * @opcode 0xb4
5771 */
5772FNIEMOP_DEF(iemOp_mov_AH_Ib)
5773{
5774 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
5775 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5776}
5777
5778
5779/**
5780 * @opcode 0xb5
5781 */
5782FNIEMOP_DEF(iemOp_CH_Ib)
5783{
5784 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
5785 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5786}
5787
5788
5789/**
5790 * @opcode 0xb6
5791 */
5792FNIEMOP_DEF(iemOp_DH_Ib)
5793{
5794 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
5795 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5796}
5797
5798
5799/**
5800 * @opcode 0xb7
5801 */
5802FNIEMOP_DEF(iemOp_BH_Ib)
5803{
5804 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
5805 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5806}
5807
5808
5809/**
5810 * Common 'mov regX,immX' helper.
5811 */
5812FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5813{
5814 switch (pVCpu->iem.s.enmEffOpSize)
5815 {
5816 case IEMMODE_16BIT:
5817 {
5818 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5820
5821 IEM_MC_BEGIN(0, 1);
5822 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5823 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5824 IEM_MC_ADVANCE_RIP();
5825 IEM_MC_END();
5826 break;
5827 }
5828
5829 case IEMMODE_32BIT:
5830 {
5831 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5833
5834 IEM_MC_BEGIN(0, 1);
5835 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5836 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5837 IEM_MC_ADVANCE_RIP();
5838 IEM_MC_END();
5839 break;
5840 }
5841 case IEMMODE_64BIT:
5842 {
5843 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5845
5846 IEM_MC_BEGIN(0, 1);
5847 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5848 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5849 IEM_MC_ADVANCE_RIP();
5850 IEM_MC_END();
5851 break;
5852 }
5853 }
5854
5855 return VINF_SUCCESS;
5856}
5857
5858
5859/**
5860 * @opcode 0xb8
5861 */
5862FNIEMOP_DEF(iemOp_eAX_Iv)
5863{
5864 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
5865 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5866}
5867
5868
5869/**
5870 * @opcode 0xb9
5871 */
5872FNIEMOP_DEF(iemOp_eCX_Iv)
5873{
5874 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
5875 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5876}
5877
5878
5879/**
5880 * @opcode 0xba
5881 */
5882FNIEMOP_DEF(iemOp_eDX_Iv)
5883{
5884 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
5885 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5886}
5887
5888
5889/**
5890 * @opcode 0xbb
5891 */
5892FNIEMOP_DEF(iemOp_eBX_Iv)
5893{
5894 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
5895 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5896}
5897
5898
5899/**
5900 * @opcode 0xbc
5901 */
5902FNIEMOP_DEF(iemOp_eSP_Iv)
5903{
5904 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
5905 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5906}
5907
5908
5909/**
5910 * @opcode 0xbd
5911 */
5912FNIEMOP_DEF(iemOp_eBP_Iv)
5913{
5914 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5915 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5916}
5917
5918
5919/**
5920 * @opcode 0xbe
5921 */
5922FNIEMOP_DEF(iemOp_eSI_Iv)
5923{
5924 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
5925 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5926}
5927
5928
5929/**
5930 * @opcode 0xbf
5931 */
5932FNIEMOP_DEF(iemOp_eDI_Iv)
5933{
5934 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
5935 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5936}
5937
5938
5939/**
5940 * @opcode 0xc0
5941 */
5942FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
5943{
5944 IEMOP_HLP_MIN_186();
5945 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5946 PCIEMOPSHIFTSIZES pImpl;
5947 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5948 {
5949 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
5950 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
5951 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
5952 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
5953 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
5954 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
5955 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
5956 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5957 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
5958 }
5959 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
5960
5961 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5962 {
5963 /* register */
5964 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5966 IEM_MC_BEGIN(3, 0);
5967 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5968 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5969 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5970 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5971 IEM_MC_REF_EFLAGS(pEFlags);
5972 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
5973 IEM_MC_ADVANCE_RIP();
5974 IEM_MC_END();
5975 }
5976 else
5977 {
5978 /* memory */
5979 IEM_MC_BEGIN(3, 2);
5980 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5981 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5982 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5983 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5984
5985 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5986 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5987 IEM_MC_ASSIGN(cShiftArg, cShift);
5988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5989 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5990 IEM_MC_FETCH_EFLAGS(EFlags);
5991 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
5992
5993 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5994 IEM_MC_COMMIT_EFLAGS(EFlags);
5995 IEM_MC_ADVANCE_RIP();
5996 IEM_MC_END();
5997 }
5998 return VINF_SUCCESS;
5999}
6000
6001
6002/**
6003 * @opcode 0xc1
6004 */
6005FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
6006{
6007 IEMOP_HLP_MIN_186();
6008 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6009 PCIEMOPSHIFTSIZES pImpl;
6010 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6011 {
6012 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
6013 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
6014 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
6015 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
6016 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
6017 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
6018 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
6019 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6020 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6021 }
6022 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6023
6024 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6025 {
6026 /* register */
6027 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6029 switch (pVCpu->iem.s.enmEffOpSize)
6030 {
6031 case IEMMODE_16BIT:
6032 IEM_MC_BEGIN(3, 0);
6033 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6034 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6035 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6036 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6037 IEM_MC_REF_EFLAGS(pEFlags);
6038 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6039 IEM_MC_ADVANCE_RIP();
6040 IEM_MC_END();
6041 return VINF_SUCCESS;
6042
6043 case IEMMODE_32BIT:
6044 IEM_MC_BEGIN(3, 0);
6045 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6046 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6047 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6048 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6049 IEM_MC_REF_EFLAGS(pEFlags);
6050 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6051 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6052 IEM_MC_ADVANCE_RIP();
6053 IEM_MC_END();
6054 return VINF_SUCCESS;
6055
6056 case IEMMODE_64BIT:
6057 IEM_MC_BEGIN(3, 0);
6058 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6059 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6060 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6061 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6062 IEM_MC_REF_EFLAGS(pEFlags);
6063 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6064 IEM_MC_ADVANCE_RIP();
6065 IEM_MC_END();
6066 return VINF_SUCCESS;
6067
6068 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6069 }
6070 }
6071 else
6072 {
6073 /* memory */
6074 switch (pVCpu->iem.s.enmEffOpSize)
6075 {
6076 case IEMMODE_16BIT:
6077 IEM_MC_BEGIN(3, 2);
6078 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6079 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6080 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6081 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6082
6083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6084 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6085 IEM_MC_ASSIGN(cShiftArg, cShift);
6086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6087 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6088 IEM_MC_FETCH_EFLAGS(EFlags);
6089 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6090
6091 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6092 IEM_MC_COMMIT_EFLAGS(EFlags);
6093 IEM_MC_ADVANCE_RIP();
6094 IEM_MC_END();
6095 return VINF_SUCCESS;
6096
6097 case IEMMODE_32BIT:
6098 IEM_MC_BEGIN(3, 2);
6099 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6100 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6101 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6102 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6103
6104 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6105 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6106 IEM_MC_ASSIGN(cShiftArg, cShift);
6107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6108 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6109 IEM_MC_FETCH_EFLAGS(EFlags);
6110 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6111
6112 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6113 IEM_MC_COMMIT_EFLAGS(EFlags);
6114 IEM_MC_ADVANCE_RIP();
6115 IEM_MC_END();
6116 return VINF_SUCCESS;
6117
6118 case IEMMODE_64BIT:
6119 IEM_MC_BEGIN(3, 2);
6120 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6121 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6122 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6124
6125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6126 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6127 IEM_MC_ASSIGN(cShiftArg, cShift);
6128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6129 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6130 IEM_MC_FETCH_EFLAGS(EFlags);
6131 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6132
6133 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6134 IEM_MC_COMMIT_EFLAGS(EFlags);
6135 IEM_MC_ADVANCE_RIP();
6136 IEM_MC_END();
6137 return VINF_SUCCESS;
6138
6139 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6140 }
6141 }
6142}
6143
6144
6145/**
6146 * @opcode 0xc2
6147 */
6148FNIEMOP_DEF(iemOp_retn_Iw)
6149{
6150 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
6151 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6153 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6154 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
6155}
6156
6157
6158/**
6159 * @opcode 0xc3
6160 */
6161FNIEMOP_DEF(iemOp_retn)
6162{
6163 IEMOP_MNEMONIC(retn, "retn");
6164 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6166 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
6167}
6168
6169
6170/**
6171 * @opcode 0xc4
6172 */
6173FNIEMOP_DEF(iemOp_les_Gv_Mp__vex2)
6174{
6175 /* The LDS instruction is invalid 64-bit mode. In legacy and
6176 compatability mode it is invalid with MOD=3.
6177 The use as a VEX prefix is made possible by assigning the inverted
6178 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
6179 outside of 64-bit mode. VEX is not available in real or v86 mode. */
6180 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6181 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
6182 {
6183 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6184 {
6185 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
6186 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
6187 }
6188 IEMOP_HLP_NO_REAL_OR_V86_MODE();
6189 }
6190
6191 IEMOP_MNEMONIC(vex3_prefix, "vex3");
6192 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6193 {
6194 /** @todo Test when exctly the VEX conformance checks kick in during
6195 * instruction decoding and fetching (using \#PF). */
6196 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
6197 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6198 if ( ( pVCpu->iem.s.fPrefixes
6199 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6200 == 0)
6201 {
6202 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
6203 if (bVex2 & 0x80 /* VEX.W */)
6204 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6205 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
6206 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
6207 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
6208 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
6209 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
6210 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
6211
6212 switch (bRm & 0x1f)
6213 {
6214 case 1: /* 0x0f lead opcode byte. */
6215 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6216
6217 case 2: /* 0x0f 0x38 lead opcode bytes. */
6218 /** @todo VEX: Just use new tables and decoders. */
6219 IEMOP_BITCH_ABOUT_STUB();
6220 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6221
6222 case 3: /* 0x0f 0x3a lead opcode bytes. */
6223 /** @todo VEX: Just use new tables and decoders. */
6224 IEMOP_BITCH_ABOUT_STUB();
6225 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6226
6227 default:
6228 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6229 return IEMOP_RAISE_INVALID_OPCODE();
6230 }
6231 }
6232 else
6233 Log(("VEX3: Invalid prefix mix!\n"));
6234 }
6235 else
6236 Log(("VEX3: AVX support disabled!\n"));
6237 return IEMOP_RAISE_INVALID_OPCODE();
6238}
6239
6240
6241/**
6242 * @opcode 0xc5
6243 */
6244FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex3)
6245{
6246 /* The LES instruction is invalid 64-bit mode. In legacy and
6247 compatability mode it is invalid with MOD=3.
6248 The use as a VEX prefix is made possible by assigning the inverted
6249 REX.R to the top MOD bit, and the top bit in the inverted register
6250 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
6251 to accessing registers 0..7 in this VEX form. */
6252 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6253 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
6254 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6255 {
6256 IEMOP_MNEMONIC(vex2_prefix, "vex2");
6257 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6258 {
6259 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6260 if ( ( pVCpu->iem.s.fPrefixes
6261 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6262 == 0)
6263 {
6264 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
6265 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
6266 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
6267 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
6268 pVCpu->iem.s.idxPrefix = bRm & 0x3;
6269
6270 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6271 }
6272
6273 Log(("VEX2: Invalid prefix mix!\n"));
6274 }
6275 else
6276 Log(("VEX2: AVX support disabled!\n"));
6277
6278 /* @todo does intel completely decode the sequence with SIB/disp before \#UD? */
6279 return IEMOP_RAISE_INVALID_OPCODE();
6280 }
6281
6282 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
6283 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
6284}
6285
6286
6287/**
6288 * @opcode 0xc6
6289 */
6290FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
6291{
6292 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6293 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
6294 return IEMOP_RAISE_INVALID_OPCODE();
6295 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
6296
6297 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6298 {
6299 /* register access */
6300 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6302 IEM_MC_BEGIN(0, 0);
6303 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
6304 IEM_MC_ADVANCE_RIP();
6305 IEM_MC_END();
6306 }
6307 else
6308 {
6309 /* memory access. */
6310 IEM_MC_BEGIN(0, 1);
6311 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6312 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6313 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6315 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
6316 IEM_MC_ADVANCE_RIP();
6317 IEM_MC_END();
6318 }
6319 return VINF_SUCCESS;
6320}
6321
6322
6323/**
6324 * @opcode 0xc7
6325 */
6326FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
6327{
6328 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6329 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
6330 return IEMOP_RAISE_INVALID_OPCODE();
6331 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
6332
6333 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6334 {
6335 /* register access */
6336 switch (pVCpu->iem.s.enmEffOpSize)
6337 {
6338 case IEMMODE_16BIT:
6339 IEM_MC_BEGIN(0, 0);
6340 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6342 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
6343 IEM_MC_ADVANCE_RIP();
6344 IEM_MC_END();
6345 return VINF_SUCCESS;
6346
6347 case IEMMODE_32BIT:
6348 IEM_MC_BEGIN(0, 0);
6349 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6351 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
6352 IEM_MC_ADVANCE_RIP();
6353 IEM_MC_END();
6354 return VINF_SUCCESS;
6355
6356 case IEMMODE_64BIT:
6357 IEM_MC_BEGIN(0, 0);
6358 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6360 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
6361 IEM_MC_ADVANCE_RIP();
6362 IEM_MC_END();
6363 return VINF_SUCCESS;
6364
6365 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6366 }
6367 }
6368 else
6369 {
6370 /* memory access. */
6371 switch (pVCpu->iem.s.enmEffOpSize)
6372 {
6373 case IEMMODE_16BIT:
6374 IEM_MC_BEGIN(0, 1);
6375 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
6377 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6379 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
6380 IEM_MC_ADVANCE_RIP();
6381 IEM_MC_END();
6382 return VINF_SUCCESS;
6383
6384 case IEMMODE_32BIT:
6385 IEM_MC_BEGIN(0, 1);
6386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6387 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
6388 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6390 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
6391 IEM_MC_ADVANCE_RIP();
6392 IEM_MC_END();
6393 return VINF_SUCCESS;
6394
6395 case IEMMODE_64BIT:
6396 IEM_MC_BEGIN(0, 1);
6397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
6399 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6401 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
6402 IEM_MC_ADVANCE_RIP();
6403 IEM_MC_END();
6404 return VINF_SUCCESS;
6405
6406 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6407 }
6408 }
6409}
6410
6411
6412
6413
6414/**
6415 * @opcode 0xc8
6416 */
6417FNIEMOP_DEF(iemOp_enter_Iw_Ib)
6418{
6419 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
6420 IEMOP_HLP_MIN_186();
6421 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6422 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
6423 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
6424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6425 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
6426}
6427
6428
6429/**
6430 * @opcode 0xc9
6431 */
6432FNIEMOP_DEF(iemOp_leave)
6433{
6434 IEMOP_MNEMONIC(leave, "leave");
6435 IEMOP_HLP_MIN_186();
6436 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6438 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
6439}
6440
6441
6442/**
6443 * @opcode 0xca
6444 */
6445FNIEMOP_DEF(iemOp_retf_Iw)
6446{
6447 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
6448 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6450 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6451 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
6452}
6453
6454
6455/**
6456 * @opcode 0xcb
6457 */
6458FNIEMOP_DEF(iemOp_retf)
6459{
6460 IEMOP_MNEMONIC(retf, "retf");
6461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6462 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6463 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
6464}
6465
6466
6467/**
6468 * @opcode 0xcc
6469 */
6470FNIEMOP_DEF(iemOp_int3)
6471{
6472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6473 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
6474}
6475
6476
6477/**
6478 * @opcode 0xcd
6479 */
6480FNIEMOP_DEF(iemOp_int_Ib)
6481{
6482 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
6483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6484 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
6485}
6486
6487
6488/**
6489 * @opcode 0xce
6490 */
6491FNIEMOP_DEF(iemOp_into)
6492{
6493 IEMOP_MNEMONIC(into, "into");
6494 IEMOP_HLP_NO_64BIT();
6495
6496 IEM_MC_BEGIN(2, 0);
6497 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
6498 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
6499 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
6500 IEM_MC_END();
6501 return VINF_SUCCESS;
6502}
6503
6504
6505/**
6506 * @opcode 0xcf
6507 */
6508FNIEMOP_DEF(iemOp_iret)
6509{
6510 IEMOP_MNEMONIC(iret, "iret");
6511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6512 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
6513}
6514
6515
6516/**
6517 * @opcode 0xd0
6518 */
6519FNIEMOP_DEF(iemOp_Grp2_Eb_1)
6520{
6521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6522 PCIEMOPSHIFTSIZES pImpl;
6523 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6524 {
6525 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
6526 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
6527 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
6528 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
6529 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
6530 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
6531 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
6532 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6533 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
6534 }
6535 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6536
6537 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6538 {
6539 /* register */
6540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6541 IEM_MC_BEGIN(3, 0);
6542 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6543 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
6544 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6545 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6546 IEM_MC_REF_EFLAGS(pEFlags);
6547 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6548 IEM_MC_ADVANCE_RIP();
6549 IEM_MC_END();
6550 }
6551 else
6552 {
6553 /* memory */
6554 IEM_MC_BEGIN(3, 2);
6555 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6556 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
6557 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6559
6560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6562 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6563 IEM_MC_FETCH_EFLAGS(EFlags);
6564 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6565
6566 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6567 IEM_MC_COMMIT_EFLAGS(EFlags);
6568 IEM_MC_ADVANCE_RIP();
6569 IEM_MC_END();
6570 }
6571 return VINF_SUCCESS;
6572}
6573
6574
6575
6576/**
6577 * @opcode 0xd1
6578 */
6579FNIEMOP_DEF(iemOp_Grp2_Ev_1)
6580{
6581 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6582 PCIEMOPSHIFTSIZES pImpl;
6583 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6584 {
6585 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
6586 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
6587 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
6588 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
6589 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
6590 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
6591 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
6592 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6593 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
6594 }
6595 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6596
6597 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6598 {
6599 /* register */
6600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6601 switch (pVCpu->iem.s.enmEffOpSize)
6602 {
6603 case IEMMODE_16BIT:
6604 IEM_MC_BEGIN(3, 0);
6605 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6606 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6607 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6608 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6609 IEM_MC_REF_EFLAGS(pEFlags);
6610 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6611 IEM_MC_ADVANCE_RIP();
6612 IEM_MC_END();
6613 return VINF_SUCCESS;
6614
6615 case IEMMODE_32BIT:
6616 IEM_MC_BEGIN(3, 0);
6617 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6618 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6619 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6620 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6621 IEM_MC_REF_EFLAGS(pEFlags);
6622 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6623 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6624 IEM_MC_ADVANCE_RIP();
6625 IEM_MC_END();
6626 return VINF_SUCCESS;
6627
6628 case IEMMODE_64BIT:
6629 IEM_MC_BEGIN(3, 0);
6630 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6631 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6632 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6633 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6634 IEM_MC_REF_EFLAGS(pEFlags);
6635 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6636 IEM_MC_ADVANCE_RIP();
6637 IEM_MC_END();
6638 return VINF_SUCCESS;
6639
6640 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6641 }
6642 }
6643 else
6644 {
6645 /* memory */
6646 switch (pVCpu->iem.s.enmEffOpSize)
6647 {
6648 case IEMMODE_16BIT:
6649 IEM_MC_BEGIN(3, 2);
6650 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6651 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6652 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6654
6655 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6657 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6658 IEM_MC_FETCH_EFLAGS(EFlags);
6659 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6660
6661 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6662 IEM_MC_COMMIT_EFLAGS(EFlags);
6663 IEM_MC_ADVANCE_RIP();
6664 IEM_MC_END();
6665 return VINF_SUCCESS;
6666
6667 case IEMMODE_32BIT:
6668 IEM_MC_BEGIN(3, 2);
6669 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6670 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6671 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6673
6674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6676 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6677 IEM_MC_FETCH_EFLAGS(EFlags);
6678 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6679
6680 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6681 IEM_MC_COMMIT_EFLAGS(EFlags);
6682 IEM_MC_ADVANCE_RIP();
6683 IEM_MC_END();
6684 return VINF_SUCCESS;
6685
6686 case IEMMODE_64BIT:
6687 IEM_MC_BEGIN(3, 2);
6688 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6689 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6690 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6692
6693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6695 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6696 IEM_MC_FETCH_EFLAGS(EFlags);
6697 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6698
6699 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6700 IEM_MC_COMMIT_EFLAGS(EFlags);
6701 IEM_MC_ADVANCE_RIP();
6702 IEM_MC_END();
6703 return VINF_SUCCESS;
6704
6705 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6706 }
6707 }
6708}
6709
6710
6711/**
6712 * @opcode 0xd2
6713 */
6714FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
6715{
6716 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6717 PCIEMOPSHIFTSIZES pImpl;
6718 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6719 {
6720 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
6721 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
6722 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
6723 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
6724 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
6725 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
6726 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
6727 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6728 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
6729 }
6730 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6731
6732 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6733 {
6734 /* register */
6735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6736 IEM_MC_BEGIN(3, 0);
6737 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6738 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6739 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6740 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6741 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6742 IEM_MC_REF_EFLAGS(pEFlags);
6743 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6744 IEM_MC_ADVANCE_RIP();
6745 IEM_MC_END();
6746 }
6747 else
6748 {
6749 /* memory */
6750 IEM_MC_BEGIN(3, 2);
6751 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6752 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6753 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6754 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6755
6756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6758 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6759 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6760 IEM_MC_FETCH_EFLAGS(EFlags);
6761 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6762
6763 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6764 IEM_MC_COMMIT_EFLAGS(EFlags);
6765 IEM_MC_ADVANCE_RIP();
6766 IEM_MC_END();
6767 }
6768 return VINF_SUCCESS;
6769}
6770
6771
6772/**
6773 * @opcode 0xd3
6774 */
6775FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
6776{
6777 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6778 PCIEMOPSHIFTSIZES pImpl;
6779 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6780 {
6781 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
6782 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
6783 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
6784 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
6785 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
6786 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
6787 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
6788 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6789 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6790 }
6791 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6792
6793 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6794 {
6795 /* register */
6796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6797 switch (pVCpu->iem.s.enmEffOpSize)
6798 {
6799 case IEMMODE_16BIT:
6800 IEM_MC_BEGIN(3, 0);
6801 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6802 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6803 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6804 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6805 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6806 IEM_MC_REF_EFLAGS(pEFlags);
6807 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6808 IEM_MC_ADVANCE_RIP();
6809 IEM_MC_END();
6810 return VINF_SUCCESS;
6811
6812 case IEMMODE_32BIT:
6813 IEM_MC_BEGIN(3, 0);
6814 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6815 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6816 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6817 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6818 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6819 IEM_MC_REF_EFLAGS(pEFlags);
6820 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6821 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6822 IEM_MC_ADVANCE_RIP();
6823 IEM_MC_END();
6824 return VINF_SUCCESS;
6825
6826 case IEMMODE_64BIT:
6827 IEM_MC_BEGIN(3, 0);
6828 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6829 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6830 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6831 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6832 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6833 IEM_MC_REF_EFLAGS(pEFlags);
6834 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6835 IEM_MC_ADVANCE_RIP();
6836 IEM_MC_END();
6837 return VINF_SUCCESS;
6838
6839 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6840 }
6841 }
6842 else
6843 {
6844 /* memory */
6845 switch (pVCpu->iem.s.enmEffOpSize)
6846 {
6847 case IEMMODE_16BIT:
6848 IEM_MC_BEGIN(3, 2);
6849 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6850 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6851 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6852 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6853
6854 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6856 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6857 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6858 IEM_MC_FETCH_EFLAGS(EFlags);
6859 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6860
6861 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6862 IEM_MC_COMMIT_EFLAGS(EFlags);
6863 IEM_MC_ADVANCE_RIP();
6864 IEM_MC_END();
6865 return VINF_SUCCESS;
6866
6867 case IEMMODE_32BIT:
6868 IEM_MC_BEGIN(3, 2);
6869 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6870 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6871 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6872 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6873
6874 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6876 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6877 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6878 IEM_MC_FETCH_EFLAGS(EFlags);
6879 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6880
6881 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6882 IEM_MC_COMMIT_EFLAGS(EFlags);
6883 IEM_MC_ADVANCE_RIP();
6884 IEM_MC_END();
6885 return VINF_SUCCESS;
6886
6887 case IEMMODE_64BIT:
6888 IEM_MC_BEGIN(3, 2);
6889 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6890 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6891 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6893
6894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6896 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6897 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6898 IEM_MC_FETCH_EFLAGS(EFlags);
6899 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6900
6901 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6902 IEM_MC_COMMIT_EFLAGS(EFlags);
6903 IEM_MC_ADVANCE_RIP();
6904 IEM_MC_END();
6905 return VINF_SUCCESS;
6906
6907 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6908 }
6909 }
6910}
6911
6912/**
6913 * @opcode 0xd4
6914 */
6915FNIEMOP_DEF(iemOp_aam_Ib)
6916{
6917 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
6918 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6920 IEMOP_HLP_NO_64BIT();
6921 if (!bImm)
6922 return IEMOP_RAISE_DIVIDE_ERROR();
6923 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
6924}
6925
6926
6927/**
6928 * @opcode 0xd5
6929 */
6930FNIEMOP_DEF(iemOp_aad_Ib)
6931{
6932 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
6933 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6935 IEMOP_HLP_NO_64BIT();
6936 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
6937}
6938
6939
6940/**
6941 * @opcode 0xd6
6942 */
6943FNIEMOP_DEF(iemOp_salc)
6944{
6945 IEMOP_MNEMONIC(salc, "salc");
6946 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
6947 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6949 IEMOP_HLP_NO_64BIT();
6950
6951 IEM_MC_BEGIN(0, 0);
6952 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6953 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
6954 } IEM_MC_ELSE() {
6955 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
6956 } IEM_MC_ENDIF();
6957 IEM_MC_ADVANCE_RIP();
6958 IEM_MC_END();
6959 return VINF_SUCCESS;
6960}
6961
6962
6963/**
6964 * @opcode 0xd7
6965 */
6966FNIEMOP_DEF(iemOp_xlat)
6967{
6968 IEMOP_MNEMONIC(xlat, "xlat");
6969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6970 switch (pVCpu->iem.s.enmEffAddrMode)
6971 {
6972 case IEMMODE_16BIT:
6973 IEM_MC_BEGIN(2, 0);
6974 IEM_MC_LOCAL(uint8_t, u8Tmp);
6975 IEM_MC_LOCAL(uint16_t, u16Addr);
6976 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
6977 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
6978 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
6979 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6980 IEM_MC_ADVANCE_RIP();
6981 IEM_MC_END();
6982 return VINF_SUCCESS;
6983
6984 case IEMMODE_32BIT:
6985 IEM_MC_BEGIN(2, 0);
6986 IEM_MC_LOCAL(uint8_t, u8Tmp);
6987 IEM_MC_LOCAL(uint32_t, u32Addr);
6988 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
6989 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
6990 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
6991 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6992 IEM_MC_ADVANCE_RIP();
6993 IEM_MC_END();
6994 return VINF_SUCCESS;
6995
6996 case IEMMODE_64BIT:
6997 IEM_MC_BEGIN(2, 0);
6998 IEM_MC_LOCAL(uint8_t, u8Tmp);
6999 IEM_MC_LOCAL(uint64_t, u64Addr);
7000 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
7001 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
7002 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
7003 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7004 IEM_MC_ADVANCE_RIP();
7005 IEM_MC_END();
7006 return VINF_SUCCESS;
7007
7008 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7009 }
7010}
7011
7012
7013/**
7014 * Common worker for FPU instructions working on ST0 and STn, and storing the
7015 * result in ST0.
7016 *
7017 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7018 */
7019FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7020{
7021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7022
7023 IEM_MC_BEGIN(3, 1);
7024 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7025 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7026 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7027 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7028
7029 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7030 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7031 IEM_MC_PREPARE_FPU_USAGE();
7032 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
7033 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7034 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7035 IEM_MC_ELSE()
7036 IEM_MC_FPU_STACK_UNDERFLOW(0);
7037 IEM_MC_ENDIF();
7038 IEM_MC_ADVANCE_RIP();
7039
7040 IEM_MC_END();
7041 return VINF_SUCCESS;
7042}
7043
7044
7045/**
7046 * Common worker for FPU instructions working on ST0 and STn, and only affecting
7047 * flags.
7048 *
7049 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7050 */
7051FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
7052{
7053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7054
7055 IEM_MC_BEGIN(3, 1);
7056 IEM_MC_LOCAL(uint16_t, u16Fsw);
7057 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7058 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7059 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7060
7061 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7062 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7063 IEM_MC_PREPARE_FPU_USAGE();
7064 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
7065 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
7066 IEM_MC_UPDATE_FSW(u16Fsw);
7067 IEM_MC_ELSE()
7068 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
7069 IEM_MC_ENDIF();
7070 IEM_MC_ADVANCE_RIP();
7071
7072 IEM_MC_END();
7073 return VINF_SUCCESS;
7074}
7075
7076
7077/**
7078 * Common worker for FPU instructions working on ST0 and STn, only affecting
7079 * flags, and popping when done.
7080 *
7081 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7082 */
7083FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
7084{
7085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7086
7087 IEM_MC_BEGIN(3, 1);
7088 IEM_MC_LOCAL(uint16_t, u16Fsw);
7089 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7090 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7091 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7092
7093 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7094 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7095 IEM_MC_PREPARE_FPU_USAGE();
7096 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
7097 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
7098 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
7099 IEM_MC_ELSE()
7100 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
7101 IEM_MC_ENDIF();
7102 IEM_MC_ADVANCE_RIP();
7103
7104 IEM_MC_END();
7105 return VINF_SUCCESS;
7106}
7107
7108
/** Opcode 0xd8 11/0. fadd st0,stN - adds STn to ST0, result in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
7115
7116
/** Opcode 0xd8 11/1. fmul st0,stN - multiplies ST0 by STn, result in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
7123
7124
/** Opcode 0xd8 11/2. fcom st0,stN - compares ST0 with STn, setting FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
7131
7132
/** Opcode 0xd8 11/3. fcomp st0,stN - same compare as fcom, then pops the stack
 * (reuses the fcom assembly worker; only the pop differs). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
7139
7140
/** Opcode 0xd8 11/4. fsub st0,stN - ST0 = ST0 - STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
7147
7148
/** Opcode 0xd8 11/5. fsubr st0,stN - reversed subtract: ST0 = STn - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
7155
7156
/** Opcode 0xd8 11/6. fdiv st0,stN - ST0 = ST0 / STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
7163
7164
7165/** Opcode 0xd8 11/7. */
7166FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
7167{
7168 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
7169 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
7170}
7171
7172
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * Fetches the 32-bit real operand from memory first, then - if ST0 is not
 * empty - invokes the assembly worker and stores the result back into ST0;
 * otherwise raises a stack underflow on ST0.
 *
 * @param   bRm         The ModR/M byte (memory form, mod != 3).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Memory operand is fetched before touching the FPU stack. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7208
7209
/** Opcode 0xd8 !11/0.
 * FADD ST0,m32real - adds a 32-bit real memory operand to ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1.
 * FMUL ST0,m32real - multiplies ST0 by a 32-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
7224
7225
/** Opcode 0xd8 !11/2.
 * FCOM ST0,m32real - compares ST0 with a 32-bit real memory operand and
 * updates FSW only; nothing is stored.  Open-coded rather than using
 * iemOpHlpFpu_st0_m32r because only FSW is produced, not a full result. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Memory-operand variant also records FDP/FDS alongside the FSW update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7258
7259
/** Opcode 0xd8 !11/3.
 * FCOMP ST0,m32real - like FCOM ST0,m32real (same assembly worker) but pops
 * the register stack after the compare / underflow handling. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* THEN_POP: pops the stack after updating FSW/FDP, unlike plain FCOM. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7292
7293
/** Opcode 0xd8 !11/4.
 * FSUB ST0,m32real - subtracts a 32-bit real memory operand from ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5.
 * FSUBR ST0,m32real - reversed subtract: ST0 = m32real - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6.
 * FDIV ST0,m32real - divides ST0 by a 32-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7.
 * FDIVR ST0,m32real - reversed divide: ST0 = m32real / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7324
7325
/**
 * @opcode 0xd8
 *
 * First x87 escape byte.  Dispatches on the ModR/M byte: mod == 3 selects the
 * register forms (ST0 op ST(i)), anything else the m32real memory forms.
 * The reg field (bits 3-5) selects the operation in both cases.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of the escape byte + ModR/M)
       so FNSTENV/FSAVE can report FOP correctly. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7365
7366
/** Opcode 0xd9 /0 mem32real
 * FLD m32real - converts a 32-bit real from memory to 80-bit and pushes it
 * onto the FPU register stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 (one below current top) must be empty for a push to succeed. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7399
7400
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real - stores ST0 to memory as a 32-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front; committed only below. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit depends on FSW (unmasked exceptions must not write). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: if the invalid-op exception is masked, store the
           indefinite QNaN; either way raise/record stack underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7435
7436
/** Opcode 0xd9 !11/3
 * FSTP m32real - stores ST0 to memory as a 32-bit real and pops the stack.
 * Identical to iemOp_fst_m32r except for the THEN_POP FSW/underflow macros. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: store indefinite QNaN if #IA is masked, then record
           underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7471
7472
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - loads the FPU environment (control/status/tag words,
 * instruction and data pointers) from memory; 14 or 28 bytes depending on
 * the effective operand size.  Heavy lifting is deferred to iemCImpl_fldenv. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7490
7491
7492/** Opcode 0xd9 !11/5 */
7493FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7494{
7495 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7496 IEM_MC_BEGIN(1, 1);
7497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7498 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7501 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7502 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7503 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7504 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7505 IEM_MC_END();
7506 return VINF_SUCCESS;
7507}
7508
7509
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte - stores the FPU environment to memory (no pending
 * exception check, hence the FN prefix); the work is done by
 * iemCImpl_fnstenv. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7527
7528
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - stores the current FPU control word to memory; no
 * pending-exception check (FN prefix). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7546
7547
/** Opcode 0xd9 0xd0 - FNOP.
 * (Only ModR/M 0xd0 is routed here by iemOp_EscF1; 0xd8-0xdf go to
 * iemOp_fstp_stN.)  Does nothing except update FOP/FPUIP. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7565
7566
/** Opcode 0xd9 11/0 stN
 * FLD ST(i) - pushes a copy of ST(i) onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Source register must be non-empty; the value is then re-pushed. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7594
7595
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i) - exchanges ST0 with ST(i).  The underflow path (either
 * register empty) is delegated to iemCImpl_fxch_underflow. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: ST(i)'s value (with C1 set in FSW) goes to ST0, ST0's to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7626
7627
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i) - copies ST0 to ST(i) and pops the register stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: no copy needed, just pop (or record
           underflow if ST0 is empty). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST0 into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7674
7675
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * If ST0 is empty a stack underflow is recorded instead of calling the
 * assembly worker.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7705
7706
/** Opcode 0xd9 0xe0.
 * FCHS - negates the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1.
 * FABS - clears the sign of ST0 (absolute value). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7721
7722
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW.
 *
 * Used by examine/test style instructions (FTST, FXAM): the assembly worker
 * produces only a new status word, no register result is stored.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX: no destination register to tag for the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7751
7752
/** Opcode 0xd9 0xe4.
 * FTST - compares ST0 against +0.0, setting C0/C2/C3 in FSW. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}


/** Opcode 0xd9 0xe5.
 * FXAM - classifies the value in ST0 via C0-C3 in FSW. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
7767
7768
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * ST7 (the slot one below the current top) must be empty, otherwise a stack
 * push overflow is recorded instead of calling the assembly worker.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7796
7797
/** Opcode 0xd9 0xe8.
 * FLD1 - pushes +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9.
 * FLDL2T - pushes log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea.
 * FLDL2E - pushes log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb.
 * FLDPI - pushes pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec.
 * FLDLG2 - pushes log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed.
 * FLDLN2 - pushes ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee.
 * FLDZ - pushes +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
7850
7851
/** Opcode 0xd9 0xf0.
 * F2XM1 - replaces ST0 with 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7858
7859
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Note the operand order: ST(i) is the first (destination) operand, ST0 the
 * second - the opposite of iemOpHlpFpu_st0_stN.
 *
 * @param   bRm         The ModR/M byte; low 3 bits select ST(i).  Callers for
 *                      fixed-register forms (e.g. FYL2X) pass the register
 *                      index directly.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7891
7892
/** Opcode 0xd9 0xf1.
 * FYL2X - computes ST1 * log2(ST0) into ST1, then pops (result ends in ST0).
 * NOTE(review): the stats identifier says _st0 while the display string says
 * "st1,st0"; left as-is since the identifier feeds statistics counters. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7899
7900
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * Used by FPTAN, FXTRACT and FSINCOS.  Requires ST0 non-empty; the push-slot
 * check is handled inside IEM_MC_PUSH_FPU_RESULT_TWO.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7930
7931
/** Opcode 0xd9 0xf2.
 * FPTAN - partial tangent of ST0; replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3.
 * FPATAN - arctangent of ST1/ST0 into ST1, then pops. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4.
 * FXTRACT - splits ST0 into exponent (replacing ST0) and pushed significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5.
 * FPREM1 - IEEE partial remainder of ST0/ST1, result in ST0 (no pop). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
7962
7963
/** Opcode 0xd9 0xf6.
 * FDECSTP - decrements the FPU stack TOP pointer; register contents and tags
 * are untouched. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7986
7987
/** Opcode 0xd9 0xf7.
 * FINCSTP - increments the FPU stack TOP pointer; register contents and tags
 * are untouched. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8010
8011
/** Opcode 0xd9 0xf8.
 * FPREM - partial remainder (truncating) of ST0/ST1, result in ST0. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9.
 * FYL2XP1 - ST1 * log2(ST0 + 1) into ST1, then pops. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa.
 * FSQRT - square root of ST0, in place. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb.
 * FSINCOS - sine replaces ST0, cosine is pushed. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc.
 * FRNDINT - rounds ST0 to integer per the current rounding mode, in place. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd.
 * FSCALE - scales ST0 by 2^trunc(ST1), result in ST0 (no pop). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe.
 * FSIN - sine of ST0, in place. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff.
 * FCOS - cosine of ST0, in place. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
8074
8075
/** Used by iemOp_EscF1.
 * Dispatch table for the 0xd9 register forms 0xe0..0xff, indexed by
 * (bRm - 0xe0).  Invalid encodings map to iemOp_Invalid. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
8112
8113
/**
 * @opcode 0xd9
 *
 * Second x87 escape byte.  mod == 3: reg field selects FLD/FXCH/FNOP/FSTP and
 * the 0xe0-0xff block is dispatched through g_apfnEscF1_E0toFF; otherwise the
 * memory forms (FLD/FST/FSTP m32real, FLDENV, FLDCW, FNSTENV, FNSTCW).
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) for FNSTENV/FSAVE. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg >= 4 implies bRm in [0xe0, 0xff] - table covers it all. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8158
8159
/** Opcode 0xda 11/0.
 * FCMOVB ST0,ST(i) - copies ST(i) to ST0 if CF is set (below).
 * Both registers must be non-empty, otherwise stack underflow on ST0. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated whether or not the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8186
8187
/** Opcode 0xda 11/1. FCMOVE - copy ST(i) to ST0 if ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty, otherwise we signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(); /* FOP/FIP are updated even when the move is skipped. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8214
8215
/** Opcode 0xda 11/2. FCMOVBE - copy ST(i) to ST0 if CF or ZF is set (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty, otherwise we signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(); /* FOP/FIP are updated even when the move is skipped. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8242
8243
/** Opcode 0xda 11/3. FCMOVU - copy ST(i) to ST0 if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty, otherwise we signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(); /* FOP/FIP are updated even when the move is skipped. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8270
8271
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done (e.g. FUCOMPP).
 *
 * The comparison result is delivered via FSW (C0/C2/C3); no data register is
 * written.  On stack underflow both operands are still popped.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly),
 *                      receiving (pFSW, pr80Value1 = ST0, pr80Value2 = ST1).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8303
8304
/** Opcode 0xda 0xe9. FUCOMPP - unordered compare ST0 with ST1, then pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8311
8312
/**
 * Common worker for FPU instructions working on ST0 and an m32i (signed 32-bit
 * integer memory operand), and storing the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form), used to compute the
 *                      effective address of the m32i operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    /* Decode the effective address before finishing instruction decoding. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8348
8349
/** Opcode 0xda !11/0. FIADD - add m32i to ST0, result in ST0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
8356
8357
/** Opcode 0xda !11/1. FIMUL - multiply ST0 by m32i, result in ST0. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8364
8365
/** Opcode 0xda !11/2. FICOM - compare ST0 with m32i; only FSW (C0/C2/C3) is updated. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8398
8399
/** Opcode 0xda !11/3. FICOMP - compare ST0 with m32i and pop; only FSW is updated. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* The stack is popped even on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8432
8433
/** Opcode 0xda !11/4. FISUB - subtract m32i from ST0, result in ST0. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
8440
8441
/** Opcode 0xda !11/5. FISUBR - reverse subtract (m32i - ST0), result in ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
8448
8449
/** Opcode 0xda !11/6. FIDIV - divide ST0 by m32i, result in ST0. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
8456
8457
/** Opcode 0xda !11/7. FIDIVR - reverse divide (m32i / ST0), result in ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8464
8465
/**
 * @opcode  0xda
 *
 * Escape group F2 decoder: register forms (mod == 3) are FCMOVcc/FUCOMPP,
 * memory forms are the m32i integer arithmetic/compare instructions.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of the escape byte + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand forms, dispatched on the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only 0xda 0xe9 (FUCOMPP) is defined in this row. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms (m32i). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8507
8508
/** Opcode 0xdb !11/0. FILD - load m32i onto the FPU stack as an 80-bit real. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) (the slot that becomes the new top) must be free, else push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8540
8541
/** Opcode 0xdb !11/1. FISTTP - store ST0 to m32i with truncation, then pop (SSE3). */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty source: with #IE masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8576
8577
/** Opcode 0xdb !11/2. FIST - store ST0 to m32i (rounded per FCW RC), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty source: with #IE masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8612
8613
/** Opcode 0xdb !11/3. FISTP - store ST0 to m32i (rounded per FCW RC), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty source: with #IE masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8648
8649
/** Opcode 0xdb !11/5. FLD - push an 80-bit real from memory onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) (the slot that becomes the new top) must be free, else push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8681
8682
/** Opcode 0xdb !11/7. FSTP - store ST0 to an 80-bit real in memory, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty source: with #IE masked, write the real indefinite (negative QNaN). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8717
8718
/** Opcode 0xdb 11/0. FCMOVNB - copy ST(i) to ST0 if CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty, otherwise we signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(); /* FOP/FIP are updated even when the move is skipped. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8745
8746
/** Opcode 0xdb 11/1. FCMOVNE - copy ST(i) to ST0 if ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty, otherwise we signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(); /* FOP/FIP are updated even when the move is skipped. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8773
8774
/** Opcode 0xdb 11/2. FCMOVNBE - copy ST(i) to ST0 if both CF and ZF are clear (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty, otherwise we signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(); /* FOP/FIP are updated even when the move is skipped. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8801
8802
/** Opcode 0xdb 11/3. FCMOVNU - copy ST(i) to ST0 if PF is clear (not unordered).
 * @note The identifier spells "nnu" (extra 'n') but the behavior matches FCMOVNU. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty, otherwise we signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(); /* FOP/FIP are updated even when the move is skipped. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8829
8830
/** Opcode 0xdb 0xe0. FNENI - 8087 interrupt enable; a no-op on later FPUs
 *  (still subject to the device-not-available check). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8842
8843
/** Opcode 0xdb 0xe1. FNDISI - 8087 interrupt disable; a no-op on later FPUs
 *  (still subject to the device-not-available check). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8855
8856
/** Opcode 0xdb 0xe2. FNCLEX - clear FPU exception flags in FSW without
 *  checking for pending exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8871
8872
/** Opcode 0xdb 0xe3. FNINIT - initialize the FPU; deferred to the C
 *  implementation, without checking for pending exceptions (fCheckXcpts=false). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8880
8881
/** Opcode 0xdb 0xe4. FNSETPM - 80287 "set protected mode"; a no-op on later
 *  FPUs (still subject to the device-not-available check). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8893
8894
/** Opcode 0xdb 0xe5. FRSTPM - 80287XL "reset protected mode"; raises \#UD as
 *  on newer CPUs (the ignore-as-no-op variant is compiled out). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8910
8911
/** Opcode 0xdb 11/5. FUCOMI - unordered compare ST0 with ST(i), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
8918
8919
/** Opcode 0xdb 11/6. FCOMI - compare ST0 with ST(i), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8926
8927
/**
 * @opcode  0xdb
 *
 * Escape group F3 decoder: register forms (mod == 3) are FCMOVNcc, the
 * FNENI/FNDISI/FNCLEX/FNINIT/FNSETPM/FRSTPM control row and FUCOMI/FCOMI;
 * memory forms are the m32i loads/stores plus m80r FLD/FSTP.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of the escape byte + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand forms, dispatched on the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* The 0xe0..0xe7 FPU control row. */
                switch (bRm)
                {
                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6:  return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7:  return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* unreachable - all cases above return; keeps compilers quiet. */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8979
8980
/**
 * Common worker for FPU instructions working on ST(i) and ST0, and storing the
 * result in ST(i) unless IE, DE or ZE was raised.
 *
 * On stack underflow (either register empty) the underflow is reported
 * against ST(i) instead of calling the implementation.
 *
 * @param   bRm         The ModR/M byte; the rm field selects ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly),
 *                      receiving (pFpuRes, pr80Value1 = ST(i), pr80Value2 = ST0).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9012
9013
/** Opcode 0xdc 11/0. FADD - add ST0 to ST(i), result in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
9020
9021
/** Opcode 0xdc 11/1. FMUL - multiply ST(i) by ST0, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
9028
9029
/** Opcode 0xdc 11/4. FSUBR - reverse subtract, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
9036
9037
/** Opcode 0xdc 11/5. FSUB - subtract, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
9044
9045
/** Opcode 0xdc 11/6. FDIVR - reverse divide, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
9052
9053
/** Opcode 0xdc 11/7. FDIV - divide, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
9060
9061
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form), used to compute the
 *                      effective address of the m64r operand.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Factor1,                1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U,   pr64Factor2,    r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the memory operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9096
9097
/** Opcode 0xdc !11/0. FADD - add m64r to ST0, result in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
9104
9105
/** Opcode 0xdc !11/1. FMUL - multiply ST0 by m64r, result in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
9112
9113
/** Opcode 0xdc !11/2. FCOM - compare ST0 with m64r; only FSW (C0/C2/C3) is updated. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9146
9147
/** Opcode 0xdc !11/3. FCOMP - compare ST0 with m64r and pop; only FSW is updated. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* The stack is popped even on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9180
9181
/** Opcode 0xdc !11/4.
 * FSUB m64real: ST(0) minus a 64-bit real memory operand, result in ST(0);
 * defers to the common ST(0)-op-m64r FPU worker. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
9188
9189
/** Opcode 0xdc !11/5.
 * FSUBR m64real: reversed subtraction (memory operand minus ST(0)), result in
 * ST(0); defers to the common ST(0)-op-m64r FPU worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
9196
9197
/** Opcode 0xdc !11/6.
 * FDIV m64real: ST(0) divided by a 64-bit real memory operand, result in
 * ST(0); defers to the common ST(0)-op-m64r FPU worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
9204
9205
/** Opcode 0xdc !11/7.
 * FDIVR m64real: reversed division (memory operand divided by ST(0)), result
 * in ST(0); defers to the common ST(0)-op-m64r FPU worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
9212
9213
/**
 * @opcode 0xdc
 *
 * FPU escape byte 4 decoder.  Register form (mod == 3) operates on
 * ST(i),ST(0); note that for sub/subr and div/divr the register-form slots are
 * swapped relative to the memory form (case 4 dispatches FSUBR, case 6 FDIVR),
 * matching the x86 encoding of the 0xdc group.  Memory form operates on
 * ST(0) with a 64-bit real operand.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the low three opcode bits + modrm for FOP/FPU-IP bookkeeping. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9252
9253
/** Opcode 0xdd !11/0.
 * FLD m64real: converts a 64-bit real memory operand to 80-bit and pushes it
 * onto the FPU stack; requires the next (relative ST(7)) slot to be free.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target (register 7 relative to the current top) must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* Target slot occupied: stack overflow handling instead of a push. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9285
9286
/** Opcode 0xdd !11/1 (comment previously said !11/0; see iemOp_EscF5 case 1).
 * FISTTP m64int: stores ST(0) to memory as a 64-bit integer using truncation
 * (SSE3), then pops the FPU stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before deciding what to store. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9321
9322
/** Opcode 0xdd !11/2 (comment previously said !11/0; see iemOp_EscF5 case 2).
 * FST m64real: stores ST(0) to memory as a 64-bit real; the stack is NOT
 * popped (contrast iemOp_fstp_m64r). */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before deciding what to store. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with IM masked, store the negative QNaN indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9357
9358
9359
9360
/** Opcode 0xdd !11/3 (comment previously said !11/0; see iemOp_EscF5 case 3).
 * FSTP m64real: stores ST(0) to memory as a 64-bit real and pops the FPU
 * stack (the _THEN_POP variants of the FSW/underflow macros). */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before deciding what to store. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with IM masked, store the negative QNaN indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9395
9396
/** Opcode 0xdd !11/4 (comment previously said !11/0; see iemOp_EscF5 case 4).
 * FRSTOR m94/108byte: restores the complete FPU state from memory; the image
 * size depends on the effective operand size, so it is passed to the C
 * implementation (iemCImpl_frstor) together with segment and address. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* Whole FPU state is replaced, so actualize it for modification. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9414
9415
/** Opcode 0xdd !11/6 (comment previously said !11/0; see iemOp_EscF5 case 6).
 * FNSAVE m94/108byte: saves the complete FPU state to memory; image size
 * depends on the effective operand size, handled by iemCImpl_fnsave.
 * NOTE(review): FNSAVE also reinitialises the FPU after saving, yet the state
 * is only actualized FOR_READ here - presumably iemCImpl_fnsave takes care of
 * the modification side; verify against that implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
9434
/** Opcode 0xdd !11/7 (comment previously said !11/0; see iemOp_EscF5 case 7).
 * FNSTSW m16: stores the FPU status word to a 16-bit memory location without
 * checking for pending FPU exceptions first (the "no-wait" form). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
9459
9460
/** Opcode 0xdd 11/0.
 * FFREE ST(i): marks register ST(i) as empty in the tag word; the stack top
 * pointer is not changed (contrast iemOp_ffreep_stN). */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* The low three modrm bits select the register to free. */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9482
9483
/** Opcode 0xdd 11/2 (comment previously said 11/1; see iemOp_EscF5 case 2).
 * FST ST(i): copies ST(0) into register ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST(0) value in a result with a zero FSW and store it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9508
9509
/** Opcode 0xdd 11/4 (comment previously said 11/3; see iemOp_EscF5 case 4).
 * FUCOM ST(i): unordered compare of ST(0) with ST(i); only FSW is updated. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
9516
9517
/** Opcode 0xdd 11/5 (comment previously said 11/4; see iemOp_EscF5 case 5).
 * FUCOMP ST(i): unordered compare of ST(0) with ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9524
9525
/**
 * @opcode 0xdd
 *
 * FPU escape byte 5 decoder.  Register form (mod == 3) covers FFREE, FST/FSTP
 * ST(i) and the unordered compares; memory form covers 64-bit real
 * load/store, 64-bit truncating integer store, and FRSTOR/FNSAVE/FNSTSW.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the low three opcode bits + modrm for FOP/FPU-IP bookkeeping. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9564
9565
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0): ST(i) += ST(0), then pop; via the stN-op-st0-pop worker. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
9572
9573
/** Opcode 0xde 11/1 (comment previously said 11/0; see iemOp_EscF6 case 1).
 * FMULP ST(i),ST(0): ST(i) *= ST(0), then pop; via the stN-op-st0-pop worker. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
9580
9581
/** Opcode 0xde 0xd9.
 * FCOMPP: compares ST(0) with ST(1), then pops both registers (the only valid
 * encoding in the 0xde /3 slot, see iemOp_EscF6). */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
9588
9589
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0): reversed subtract into ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
9596
9597
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0): subtract into ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
9604
9605
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0): reversed divide into ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
9612
9613
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0): divide into ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9620
9621
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * Decodes the effective address, fetches the signed 16-bit integer operand,
 * and invokes the given assembly implementation on (ST(0), m16i), writing the
 * result back to ST(0).  An empty ST(0) is turned into a stack underflow.
 *
 * @param bRm The ModR/M byte (memory form).
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9657
9658
/** Opcode 0xde !11/0.
 * FIADD m16int: ST(0) += signed 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
9665
9666
/** Opcode 0xde !11/1.
 * FIMUL m16int: ST(0) *= signed 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9673
9674
/** Opcode 0xde !11/2.
 * FICOM m16int: compares ST(0) with a signed 16-bit integer memory operand;
 * only FSW is updated, no register written, no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9707
9708
/** Opcode 0xde !11/3.
 * FICOMP m16int: like FICOM m16i (same iemAImpl_ficom_r80_by_i16 worker), but
 * pops the FPU stack afterwards (_THEN_POP macro variants). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9741
9742
/** Opcode 0xde !11/4.
 * FISUB m16int: ST(0) -= signed 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
9749
9750
/** Opcode 0xde !11/5.
 * FISUBR m16int: reversed subtract (m16i minus ST(0)), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
9757
9758
/** Opcode 0xde !11/6.
 * FIDIV m16int: ST(0) /= signed 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
9765
9766
/** Opcode 0xde !11/7.
 * FIDIVR m16int: reversed divide (m16i divided by ST(0)), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9773
9774
/**
 * @opcode 0xde
 *
 * FPU escape byte 6 decoder.  Register form (mod == 3) holds the
 * operate-and-pop instructions (FADDP..FDIVP); slot /3 is valid only for the
 * single 0xd9 encoding (FCOMPP).  Memory form operates on ST(0) with a
 * signed 16-bit integer operand.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the low three opcode bits + modrm for FOP/FPU-IP bookkeeping. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9815
9816
/** Opcode 0xdf 11/0.
 * FFREEP ST(i): undocumented instruction, assumed to work like FFREE followed
 * by FINCSTP - frees ST(i) and then increments the stack top pointer. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Free the selected register, then pop by incrementing TOP. */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9838
9839
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copies the FPU status word into the AX register without checking
 * for pending FPU exceptions first (the "no-wait" form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9856
9857
/** Opcode 0xdf 11/5.
 * FUCOMIP ST(0),ST(i): unordered compare setting EFLAGS, then pop.
 * NOTE(review): this uses the same iemAImpl_fcomi_r80_by_r80 worker as FCOMIP
 * below; FUCOMI differs from FCOMI only in #IA behavior on QNaN operands -
 * confirm the shared worker covers the unordered-compare semantics. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9864
9865
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i): ordered compare setting EFLAGS, then pop; deferred to
 * the shared fcomi/fucomi C implementation with fPop set. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9872
9873
/** Opcode 0xdf !11/0.
 * FILD m16int: converts a signed 16-bit integer memory operand to 80-bit real
 * and pushes it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target (register 7 relative to the current top) must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9905
9906
/** Opcode 0xdf !11/1.
 * FISTTP m16int: stores ST(0) to memory as a 16-bit integer using truncation
 * (SSE3), then pops the FPU stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before deciding what to store. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9941
9942
/** Opcode 0xdf !11/2.
 * FIST m16int: stores ST(0) to memory as a 16-bit integer (rounded per FCW);
 * the stack is NOT popped (contrast iemOp_fistp_m16i). */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before deciding what to store. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9977
9978
/** Opcode 0xdf !11/3.
 * FISTP m16int: stores ST(0) to memory as a 16-bit integer (rounded per FCW)
 * and pops the FPU stack (_THEN_POP macro variants). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before deciding what to store. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10013
10014
/** Opcode 0xdf !11/4.
 * FBLD m80bcd (load 80-bit packed BCD) - not implemented yet; FNIEMOP_STUB_1
 * generates a placeholder handler. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
10017
10018
/** Opcode 0xdf !11/5.
 * FILD m64int: converts a signed 64-bit integer memory operand to 80-bit real
 * and pushes it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target (register 7 relative to the current top) must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10050
10051
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd (store 80-bit packed BCD and pop) - not implemented yet;
 * FNIEMOP_STUB_1 generates a placeholder handler. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
10054
10055
/** Opcode 0xdf !11/7.
 * FISTP m64int: stores ST(0) to memory as a 64-bit integer (rounded per FCW)
 * and pops the FPU stack (_THEN_POP macro variants). */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before deciding what to store. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10090
10091
/**
 * @opcode 0xdf
 *
 * Escape byte 0xdf: dispatches on the ModR/M byte.  Mod=3 selects the
 * register forms (mostly reserved/undocumented aliases), otherwise the
 * memory forms (integer/BCD load and store).
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,  bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN,  bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,  bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)                             /* only DF E0 encodes fnstsw ax; the rest are invalid */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN,  bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i,   bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i,  bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i,   bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10131
10132
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ Jb: decrement the counter register and take the short jump
 * when the counter is non-zero and ZF is clear.  The effective address size
 * selects which counter is used (CX/ECX/RCX).
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10181
10182
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ Jb: decrement the counter register and take the short jump
 * when the counter is non-zero and ZF is set.  The effective address size
 * selects which counter is used (CX/ECX/RCX).
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10231
10232
/**
 * @opcode 0xe2
 *
 * LOOP Jb: decrement the counter register and take the short jump while the
 * counter is non-zero.  Contains a fast path for 'loop $' (a loop that jumps
 * to its own start): instead of iterating, the counter is zeroed directly
 * and execution falls through.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* i8Imm == -instruction_length means the branch target is the
               instruction itself; short-circuit the busy-wait. */
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10308
10309
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: jump short when the counter register selected by the
 * effective address size is zero.  Note the inverted IF structure: the taken
 * branch is the ELSE arm (counter == 0).
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10355
10356
/** Opcode 0xe4 - in AL,Ib: byte input from the immediate 8-bit I/O port.
 *  Deferred to the C implementation with access size 1. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}
10365
10366
/** Opcode 0xe5 - in eAX,Ib: word/dword input from the immediate 8-bit I/O
 *  port; access size (2 or 4) follows the effective operand size. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10375
10376
/** Opcode 0xe6 - out Ib,AL: byte output to the immediate 8-bit I/O port.
 *  Deferred to the C implementation with access size 1. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
10385
10386
/** Opcode 0xe7 - out Ib,eAX: word/dword output to the immediate 8-bit I/O
 *  port; access size (2 or 4) follows the effective operand size. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10395
10396
/**
 * @opcode 0xe8
 *
 * CALL Jv: near relative call.  The immediate is fetched per the effective
 * operand size and handed, sign-extended, to the matching C implementation.
 * In 64-bit mode the immediate stays 32 bits but is sign-extended to 64.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10427
10428
/**
 * @opcode 0xe9
 *
 * JMP Jv: near relative jump.  The 64-bit case shares the 32-bit path since
 * the displacement is still a 32-bit signed immediate (sign-extended by
 * IEM_MC_REL_JMP_S32 as appropriate for the mode).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10460
10461
/**
 * @opcode 0xea
 *
 * JMP Ap: direct far jump (ptr16:16 / ptr16:32).  Invalid in 64-bit mode
 * (IEMOP_HLP_NO_64BIT).  The offset and selector are decoded here and the
 * actual far transfer is done by the C implementation.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
10480
10481
/**
 * @opcode 0xeb
 *
 * JMP Jb: short relative jump with an 8-bit signed displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
10497
10498
/** Opcode 0xec - in AL,DX: byte input from the I/O port in DX.
 *  Deferred to the C implementation with access size 1. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
10506
10507
/** Opcode 0xed - in eAX,DX: word/dword input from the I/O port in DX;
 *  access size (2 or 4) follows the effective operand size.
 *  NOTE(review): the function is named iemOp_eAX_DX, missing the 'in_'
 *  prefix used by its siblings (e.g. iemOp_in_AL_DX); renaming would also
 *  require updating the opcode dispatch table, so it is only noted here. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10515
10516
/** Opcode 0xee - out DX,AL: byte output to the I/O port in DX.
 *  Deferred to the C implementation with access size 1. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
10524
10525
/** Opcode 0xef - out DX,eAX: word/dword output to the I/O port in DX;
 *  access size (2 or 4) follows the effective operand size. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10533
10534
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records the prefix flag, then fetches the next opcode byte
 * and re-dispatches through the one-byte opcode table.  Whether LOCK is
 * legal is decided by the target instruction's decoder.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10546
10547
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises a #DB via the common software-interrupt C
 * implementation (fIsBpInstr=false so it is not treated as INT3).
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say...  */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
10558
10559
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: records the prefix, updates the prefix index used by
 * the 4-entry (none/66/F3/F2) opcode tables, then fetches and dispatches
 * the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10577
10578
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix: records the prefix, updates the prefix index used by
 * the 4-entry (none/66/F3/F2) opcode tables, then fetches and dispatches
 * the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10596
10597
10598/**
10599 * @opcode 0xf4
10600 */
10601FNIEMOP_DEF(iemOp_hlt)
10602{
10603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10604 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
10605}
10606
10607
/**
 * @opcode 0xf5
 *
 * CMC: complements the carry flag; no other flags are touched.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10621
10622
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Read-modify-write on the 8-bit r/m operand; the concrete operation and
 * its EFLAGS update are supplied through @a pImpl.  The memory form honours
 * a LOCK prefix by dispatching to the locked worker.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst, 0);
        IEM_MC_ARG(uint32_t *,  pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        /* Map read/write so the operation can be done in place. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* A LOCK prefix selects the atomic worker. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10666
10667
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are delegated to iemOpCommonUnaryGReg; the memory form
 * is a read-modify-write handled here per operand size, honouring a LOCK
 * prefix via the locked worker in @a pImpl.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10746
10747
/** Opcode 0xf6 /0 - test Eb,Ib: AND without writeback; only EFLAGS are
 *  updated, so the memory operand is mapped read-only. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* The immediate byte follows the ModR/M bytes, hence the cbImm=1
           argument to the effective address calculation. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10794
10795
/** Opcode 0xf7 /0 - test Ev,Iv: AND without writeback; only EFLAGS are
 *  updated, so the memory operand is mapped read-only and the register form
 *  never writes the destination (hence no high-dword clearing). */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* Iv is imm32 sign-extended to 64 bits, like all Ev,Iv forms. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint16_t,        u16Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* cbImm tells the effective address calculation how many
                   immediate bytes still follow (2/4/4 by operand size). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint32_t,        u32Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint64_t,        u64Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10935
10936
/** Opcode 0xf6 /4, /5, /6 and /7 - mul/imul/div/idiv Eb.
 *  The 8-bit forms operate on AX (implicit source/destination); the worker
 *  returns non-zero to request a \#DE (divide error). */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 signals a divide error (#DE). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10988
10989
10990/** Opcode 0xf7 /4, /5, /6 and /7. */
10991FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
10992{
10993 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10994
10995 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10996 {
10997 /* register access */
10998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10999 switch (pVCpu->iem.s.enmEffOpSize)
11000 {
11001 case IEMMODE_16BIT:
11002 {
11003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11004 IEM_MC_BEGIN(4, 1);
11005 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11006 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11007 IEM_MC_ARG(uint16_t, u16Value, 2);
11008 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11009 IEM_MC_LOCAL(int32_t, rc);
11010
11011 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11012 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11013 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11014 IEM_MC_REF_EFLAGS(pEFlags);
11015 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11016 IEM_MC_IF_LOCAL_IS_Z(rc) {
11017 IEM_MC_ADVANCE_RIP();
11018 } IEM_MC_ELSE() {
11019 IEM_MC_RAISE_DIVIDE_ERROR();
11020 } IEM_MC_ENDIF();
11021
11022 IEM_MC_END();
11023 return VINF_SUCCESS;
11024 }
11025
11026 case IEMMODE_32BIT:
11027 {
11028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11029 IEM_MC_BEGIN(4, 1);
11030 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11031 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11032 IEM_MC_ARG(uint32_t, u32Value, 2);
11033 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11034 IEM_MC_LOCAL(int32_t, rc);
11035
11036 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11037 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11038 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11039 IEM_MC_REF_EFLAGS(pEFlags);
11040 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11041 IEM_MC_IF_LOCAL_IS_Z(rc) {
11042 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11043 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11044 IEM_MC_ADVANCE_RIP();
11045 } IEM_MC_ELSE() {
11046 IEM_MC_RAISE_DIVIDE_ERROR();
11047 } IEM_MC_ENDIF();
11048
11049 IEM_MC_END();
11050 return VINF_SUCCESS;
11051 }
11052
11053 case IEMMODE_64BIT:
11054 {
11055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11056 IEM_MC_BEGIN(4, 1);
11057 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11058 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11059 IEM_MC_ARG(uint64_t, u64Value, 2);
11060 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11061 IEM_MC_LOCAL(int32_t, rc);
11062
11063 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11064 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11065 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11066 IEM_MC_REF_EFLAGS(pEFlags);
11067 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11068 IEM_MC_IF_LOCAL_IS_Z(rc) {
11069 IEM_MC_ADVANCE_RIP();
11070 } IEM_MC_ELSE() {
11071 IEM_MC_RAISE_DIVIDE_ERROR();
11072 } IEM_MC_ENDIF();
11073
11074 IEM_MC_END();
11075 return VINF_SUCCESS;
11076 }
11077
11078 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11079 }
11080 }
11081 else
11082 {
11083 /* memory access. */
11084 switch (pVCpu->iem.s.enmEffOpSize)
11085 {
11086 case IEMMODE_16BIT:
11087 {
11088 IEM_MC_BEGIN(4, 2);
11089 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11090 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11091 IEM_MC_ARG(uint16_t, u16Value, 2);
11092 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11093 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11094 IEM_MC_LOCAL(int32_t, rc);
11095
11096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11098 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11099 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11100 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11101 IEM_MC_REF_EFLAGS(pEFlags);
11102 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11103 IEM_MC_IF_LOCAL_IS_Z(rc) {
11104 IEM_MC_ADVANCE_RIP();
11105 } IEM_MC_ELSE() {
11106 IEM_MC_RAISE_DIVIDE_ERROR();
11107 } IEM_MC_ENDIF();
11108
11109 IEM_MC_END();
11110 return VINF_SUCCESS;
11111 }
11112
11113 case IEMMODE_32BIT:
11114 {
11115 IEM_MC_BEGIN(4, 2);
11116 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11117 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11118 IEM_MC_ARG(uint32_t, u32Value, 2);
11119 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11121 IEM_MC_LOCAL(int32_t, rc);
11122
11123 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11125 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11126 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11127 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11128 IEM_MC_REF_EFLAGS(pEFlags);
11129 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11130 IEM_MC_IF_LOCAL_IS_Z(rc) {
11131 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11132 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11133 IEM_MC_ADVANCE_RIP();
11134 } IEM_MC_ELSE() {
11135 IEM_MC_RAISE_DIVIDE_ERROR();
11136 } IEM_MC_ENDIF();
11137
11138 IEM_MC_END();
11139 return VINF_SUCCESS;
11140 }
11141
11142 case IEMMODE_64BIT:
11143 {
11144 IEM_MC_BEGIN(4, 2);
11145 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11146 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11147 IEM_MC_ARG(uint64_t, u64Value, 2);
11148 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11149 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11150 IEM_MC_LOCAL(int32_t, rc);
11151
11152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11154 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11155 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11156 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11157 IEM_MC_REF_EFLAGS(pEFlags);
11158 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11159 IEM_MC_IF_LOCAL_IS_Z(rc) {
11160 IEM_MC_ADVANCE_RIP();
11161 } IEM_MC_ELSE() {
11162 IEM_MC_RAISE_DIVIDE_ERROR();
11163 } IEM_MC_ENDIF();
11164
11165 IEM_MC_END();
11166 return VINF_SUCCESS;
11167 }
11168
11169 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11170 }
11171 }
11172}
11173
/**
 * @opcode 0xf6
 *
 * Group 3 with a byte-sized r/m operand: TEST, NOT, NEG, MUL, IMUL, DIV
 * and IDIV Eb.  The instruction is selected by the reg field of ModR/M.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11212
11213
/**
 * @opcode 0xf7
 *
 * Group 3 with a word/dword/qword r/m operand: TEST, NOT, NEG, MUL, IMUL,
 * DIV and IDIV Ev.  The instruction is selected by the reg field of ModR/M.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11252
11253
/**
 * @opcode 0xf8
 *
 * CLC - clear the carry flag.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11267
11268
/**
 * @opcode 0xf9
 *
 * STC - set the carry flag.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11282
11283
/**
 * @opcode 0xfa
 *
 * CLI - clear the interrupt flag.  Privilege/VME checks are done in the
 * deferred C implementation.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
11293
11294
/**
 * @opcode 0xfb
 *
 * STI - set the interrupt flag.  Privilege/VME checks and the one
 * instruction interrupt shadow are handled by the C implementation.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11301
11302
/**
 * @opcode 0xfc
 *
 * CLD - clear the direction flag.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11316
11317
/**
 * @opcode 0xfd
 *
 * STD - set the direction flag.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11331
11332
/**
 * @opcode 0xfe
 *
 * Group 4: INC Eb (/0) and DEC Eb (/1); all other reg field values are
 * invalid encodings.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
11352
11353
/**
 * Opcode 0xff /2.  CALL Ev - near indirect call.
 *
 * The target RIP comes either from a general register (mod == 3) or from a
 * memory operand.  Defaults to 64-bit operand size in long mode.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11438
/** C implementation callback type for far branches (callf/jmpf workers). */
typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11440
11441FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11442{
11443 /* Registers? How?? */
11444 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
11445 { /* likely */ }
11446 else
11447 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11448
11449 /* Far pointer loaded from memory. */
11450 switch (pVCpu->iem.s.enmEffOpSize)
11451 {
11452 case IEMMODE_16BIT:
11453 IEM_MC_BEGIN(3, 1);
11454 IEM_MC_ARG(uint16_t, u16Sel, 0);
11455 IEM_MC_ARG(uint16_t, offSeg, 1);
11456 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11460 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11461 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11462 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11463 IEM_MC_END();
11464 return VINF_SUCCESS;
11465
11466 case IEMMODE_64BIT:
11467 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
11468 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11469 * and call far qword [rsp] encodings. */
11470 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11471 {
11472 IEM_MC_BEGIN(3, 1);
11473 IEM_MC_ARG(uint16_t, u16Sel, 0);
11474 IEM_MC_ARG(uint64_t, offSeg, 1);
11475 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11479 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11480 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11481 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11482 IEM_MC_END();
11483 return VINF_SUCCESS;
11484 }
11485 /* AMD falls thru. */
11486 /* fall thru */
11487
11488 case IEMMODE_32BIT:
11489 IEM_MC_BEGIN(3, 1);
11490 IEM_MC_ARG(uint16_t, u16Sel, 0);
11491 IEM_MC_ARG(uint32_t, offSeg, 1);
11492 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11493 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11496 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11497 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11498 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11499 IEM_MC_END();
11500 return VINF_SUCCESS;
11501
11502 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11503 }
11504}
11505
11506
/**
 * Opcode 0xff /3.  CALLF Ep - far indirect call via memory pointer.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
11516
11517
/**
 * Opcode 0xff /4.  JMP Ev - near indirect jump.
 *
 * The target RIP comes either from a general register (mod == 3) or from a
 * memory operand.  Defaults to 64-bit operand size in long mode.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11602
11603
/**
 * Opcode 0xff /5.  JMPF Ep - far indirect jump via memory pointer.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
11613
11614
/**
 * Opcode 0xff /6.  PUSH Ev.
 *
 * Register operands go through the common push-GReg worker; memory
 * operands are loaded and pushed here.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11670
11671
/**
 * @opcode 0xff
 *
 * Group 5 dispatcher: INC, DEC, CALL, CALLF, JMP, JMPF and PUSH Ev,
 * selected by the reg field of the ModR/M byte; /7 is invalid.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}
11702
11703
11704
/**
 * The one-byte (primary) opcode dispatch table, indexed by opcode byte.
 *
 * Each entry decodes and emulates one instruction, prefix byte, or escape
 * byte of the primary opcode map.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_eAX_DX,           iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
11772
11773
11774/** @} */
11775
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette