VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 66257

Last change on this file since 66257 was 66160, checked in by vboxsync, 8 years ago

IEM: Mark instructions allowing lock prefix use (incomplete).

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 389.8 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 66160 2017-03-17 22:42:52Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
/* Dispatch table for all 256 one-byte opcodes; defined later in this file. */
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24/* Instruction group definitions: */
25
26/** @defgroup og_gen General
27 * @{ */
28 /** @defgroup og_gen_arith Arithmetic
29 * @{ */
30 /** @defgroup og_gen_arith_bin Binary numbers */
31 /** @defgroup og_gen_arith_dec Decimal numbers */
32 /** @} */
33/** @} */
34
35/** @defgroup og_stack Stack
36 * @{ */
37 /** @defgroup og_stack_sreg Segment registers */
38/** @} */
39
40/** @defgroup og_prefix Prefixes */
41/** @defgroup og_escapes Escape bytes */
42
43
44
45/** @name One byte opcodes.
46 * @{
47 */
48
49/* Instruction specification format - work in progress: */
50
/**
 * @opcode 0x00
 * @opmnemonic add
 * @op1 rm:Eb
 * @op2 reg:Gb
 * @opmaps one
 * @openc ModR/M
 * @opflmodify cf,pf,af,zf,sf,of
 * @ophints harmless ignores_op_size
 * @opstats add_Eb_Gb
 * @opgroup og_gen_arith_bin
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD Eb,Gb: defer to the common byte r/m,reg decoder with the 'add' worker table. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
72
73
/**
 * @opcode 0x01
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD Ev,Gv: defer to the common word/dword/qword r/m,reg decoder with the 'add' worker table. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
88
89
/**
 * @opcode 0x02
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD Gb,Eb: register destination form; no LOCK hint since the destination is a register. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
101
102
/**
 * @opcode 0x03
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD Gv,Ev: register destination form of the word/dword/qword add. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
114
115
/**
 * @opcode 0x04
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL,Ib: fixed AL destination with byte immediate. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
127
128
/**
 * @opcode 0x05
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX,Iz: fixed accumulator destination with operand-size immediate. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
143
144
/**
 * @opcode 0x06
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES: invalid in 64-bit mode; defers to the common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
155
156
/**
 * @opcode 0x07
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES: invalid in 64-bit mode; loading a segment register is deferred to C code. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
168
169
/**
 * @opcode 0x08
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR Eb,Gb: AF is architecturally undefined after OR, so tell the verifier to ignore it. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
187
188
/**
 * @opcode 0x09
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR Ev,Gv: AF is architecturally undefined after OR. (Comment opener fixed to doxygen style so the @op tags are picked up.) */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}
208
209
210/**
211 * @opcode 0x0a
212 * @opgroup og_gen_arith_bin
213 * @opflmodify cf,pf,af,zf,sf,of
214 * @opflundef af
215 * @opflclear of,cf
216 * @opcopytests iemOp_or_Eb_Gb
217 */
218FNIEMOP_DEF(iemOp_or_Gb_Eb)
219{
220 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
221 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
222 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
223}
224
225
/**
 * @opcode 0x0b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR Gv,Ev: register destination form of the word/dword/qword OR. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
240
241
/**
 * @opcode 0x0c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL,Ib: fixed AL destination with byte immediate. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
256
257
/**
 * @opcode 0x0d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX,Iz: Iz is at most 32 bits and sign-extended for o64 (hence the 32-bit op2 in the o64 tests above). */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
278
279
/**
 * @opcode 0x0e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS: invalid in 64-bit mode; flagged potentially dangerous for the disassembler. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
290
291
292/**
293 * @opcode 0x0f
294 * @opmnemonic EscTwo0f
295 * @openc two0f
296 * @opdisenum OP_2B_ESC
297 * @ophints harmless
298 * @opgroup og_escapes
299 */
300FNIEMOP_DEF(iemOp_2byteEscape)
301{
302#ifdef VBOX_STRICT
303 /* Sanity check the table the first time around. */
304 static bool s_fTested = false;
305 if (RT_LIKELY(s_fTested)) { /* likely */ }
306 else
307 {
308 s_fTested = true;
309 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
310 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
311 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
312 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
313 }
314#endif
315
316 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
317 {
318 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
319 IEMOP_HLP_MIN_286();
320 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
321 }
322 /* @opdone */
323
324 /*
325 * On the 8086 this is a POP CS instruction.
326 * For the time being we don't specify this this.
327 */
328 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
329 IEMOP_HLP_NO_64BIT();
330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
331 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
332}
333
/**
 * @opcode 0x10
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC Eb,Gb: add-with-carry, consumes incoming CF (see @opfltest). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
350
351
/**
 * @opcode 0x11
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev,Gv: add-with-carry, word/dword/qword r/m destination. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
368
369
/**
 * @opcode 0x12
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC Gb,Eb: register destination form. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
382
383
/**
 * @opcode 0x13
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv,Ev: register destination form. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
396
397
/**
 * @opcode 0x14
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL,Ib: fixed AL destination with byte immediate. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
410
411
/**
 * @opcode 0x15
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX,Iz: fixed accumulator destination with operand-size immediate. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
424
425
/**
 * @opcode 0x16
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS: invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
435
436
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS: invalid in 64-bit mode; inhibits interrupts for one instruction
       (DISOPTYPE_INHIBIT_IRQS).  NOTE(review): the previous doc tags
       (og_gen_arith_bin, @opfltest cf, @opflmodify ...) looked copy-pasted from
       the SBB block below; POP SS does not touch the arithmetic flags. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
450
451
/**
 * @opcode 0x18
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB Eb,Gb: subtract-with-borrow, consumes incoming CF. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
463
464
/**
 * @opcode 0x19
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev,Gv: subtract-with-borrow, word/dword/qword r/m destination. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
476
477
/**
 * @opcode 0x1a
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB Gb,Eb: register destination form. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
489
490
/**
 * @opcode 0x1b
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv,Ev: register destination form. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
502
503
/**
 * @opcode 0x1c
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL,Ib: fixed AL destination with byte immediate. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
515
516
/**
 * @opcode 0x1d
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX,Iz: fixed accumulator destination with operand-size immediate. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
528
529
/**
 * @opcode 0x1e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS: invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
540
541
/**
 * @opcode 0x1f
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS: invalid in 64-bit mode; segment-register load deferred to C code. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
553
554
/**
 * @opcode 0x20
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND Eb,Gb: AF is architecturally undefined after AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
568
569
/**
 * @opcode 0x21
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND Ev,Gv: word/dword/qword r/m destination. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
583
584
/**
 * @opcode 0x22
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND Gb,Eb: register destination form. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
598
599
/**
 * @opcode 0x23
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND Gv,Ev: register destination form. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
613
614
615/**
616 * @opcode 0x24
617 * @opgroup og_gen_arith_bin
618 * @opflmodify cf,pf,af,zf,sf,of
619 * @opflundef af
620 * @opflclear of,cf
621 */
622FNIEMOP_DEF(iemOp_and_Al_Ib)
623{
624 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
625 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
626 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
627}
628
629
/**
 * @opcode 0x25
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX,Iz: fixed accumulator destination with operand-size immediate. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
643
644
/**
 * @opcode 0x26
 * @opmnemonic SEG
 * @op1 ES
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it, set the effective segment, then
       decode and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
663
664
/**
 * @opcode 0x27
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA: decimal adjust AL after addition; invalid in 64-bit mode; deferred to C code. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
679
680
/**
 * @opcode 0x28
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB Eb,Gb: byte subtract, r/m destination. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
691
692
/**
 * @opcode 0x29
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB Ev,Gv: word/dword/qword subtract, r/m destination. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
703
704
705/**
706 * @opcode 0x2a
707 * @opgroup og_gen_arith_bin
708 * @opflmodify cf,pf,af,zf,sf,of
709 */
710FNIEMOP_DEF(iemOp_sub_Gb_Eb)
711{
712 IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
713 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
714}
715
716
717/**
718 * @opcode 0x2b
719 * @opgroup og_gen_arith_bin
720 * @opflmodify cf,pf,af,zf,sf,of
721 */
722FNIEMOP_DEF(iemOp_sub_Gv_Ev)
723{
724 IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
725 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
726}
727
728
729/**
730 * @opcode 0x2c
731 * @opgroup og_gen_arith_bin
732 * @opflmodify cf,pf,af,zf,sf,of
733 */
734FNIEMOP_DEF(iemOp_sub_Al_Ib)
735{
736 IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
737 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
738}
739
740
741/**
742 * @opcode 0x2d
743 * @opgroup og_gen_arith_bin
744 * @opflmodify cf,pf,af,zf,sf,of
745 */
746FNIEMOP_DEF(iemOp_sub_eAX_Iz)
747{
748 IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
749 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
750}
751
752
/**
 * @opcode 0x2e
 * @opmnemonic SEG
 * @op1 CS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it, set the effective segment, then
       decode and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
771
772
/**
 * @opcode 0x2f
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS: decimal adjust AL after subtraction; invalid in 64-bit mode; deferred to C code. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
787
788
/**
 * @opcode 0x30
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR Eb,Gb: AF is architecturally undefined after XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
802
803
/**
 * @opcode 0x31
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR Ev,Gv: word/dword/qword r/m destination. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
817
818
/**
 * @opcode 0x32
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR Gb,Eb: register destination form. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
832
833
/**
 * @opcode 0x33
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR Gv,Ev: register destination form. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
847
848
/**
 * @opcode 0x34
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL,Ib: fixed AL destination with byte immediate. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
862
863
864/**
865 * @opcode 0x35
866 * @opgroup og_gen_arith_bin
867 * @opflmodify cf,pf,af,zf,sf,of
868 * @opflundef af
869 * @opflclear of,cf
870 */
871FNIEMOP_DEF(iemOp_xor_eAX_Iz)
872{
873 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
874 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
875 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
876}
877
878
/**
 * @opcode 0x36
 * @opmnemonic SEG
 * @op1 SS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it, set the effective segment, then
       decode and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
897
898
/**
 * @opcode 0x37
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef pf,zf,sf,of
 * @opgroup og_gen_arith_dec
 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
 * @optest efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
 * @optest efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
 * @optest efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
 * @optest efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
 * @optest efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
 * @optest efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
 * @optest efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
 * @optest efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
 * @optest efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
 * @optest efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
 * @optest efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA: ASCII adjust AL after addition; invalid in 64-bit mode; deferred to C code. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
}
933
934
/**
 * @opcode 0x38
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP Eb,Gb: subtract without storing the result; shares the binary-operator decoder. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
943
944
/**
 * @opcode 0x39
 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP Ev,Gv. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
953
954
/**
 * @opcode 0x3a
 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP Gb,Eb. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
963
964
/**
 * @opcode 0x3b
 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP Gv,Ev. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
973
974
/**
 * @opcode 0x3c
 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL,Ib. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
983
984
/**
 * @opcode 0x3d
 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX,Iz. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
993
994
/**
 * @opcode 0x3e
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it, set the effective segment, then
       decode and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1007
1008
1009/**
1010 * @opcode 0x3f
1011 * @opfltest af,cf
1012 * @opflmodify cf,pf,af,zf,sf,of
1013 * @opflundef pf,zf,sf,of
1014 * @opgroup og_gen_arith_dec
1015 * @optest efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1016 * @optest efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1017 * @optest efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1018 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1019 * @optest efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1020 * @optest efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1021 * @optest efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1022 * @optest efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1023 * @optest efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1024 * @optest efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1025 * @optest efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1026 * @optest efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1027 * @optest efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1028 * @optest efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1029 * @optest efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1030 * @optest efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1031 * @optest efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1032 * @optest efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1033 * @optest efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1034 */
1035FNIEMOP_DEF(iemOp_aas)
1036{
1037 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
1038 IEMOP_HLP_NO_64BIT();
1039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1040 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1041
1042 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
1043}
1044
1045
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * References the general register selected by @a iReg at the current effective
 * operand size and invokes the matching worker from @a pImpl on it together
 * with EFLAGS.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit writes zero the high half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reached: all IEMMODE values are handled above; keeps compilers quiet. */
    return VINF_SUCCESS;
}
1090
1091
1092/**
1093 * @opcode 0x40
1094 */
1095FNIEMOP_DEF(iemOp_inc_eAX)
1096{
1097 /*
1098 * This is a REX prefix in 64-bit mode.
1099 */
1100 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1101 {
1102 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
1103 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
1104
1105 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1106 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1107 }
1108
1109 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
1110 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
1111}
1112
1113
1114/**
1115 * @opcode 0x41
1116 */
1117FNIEMOP_DEF(iemOp_inc_eCX)
1118{
1119 /*
1120 * This is a REX prefix in 64-bit mode.
1121 */
1122 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1123 {
1124 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
1125 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
1126 pVCpu->iem.s.uRexB = 1 << 3;
1127
1128 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1129 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1130 }
1131
1132 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
1133 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
1134}
1135
1136
1137/**
1138 * @opcode 0x42
1139 */
1140FNIEMOP_DEF(iemOp_inc_eDX)
1141{
1142 /*
1143 * This is a REX prefix in 64-bit mode.
1144 */
1145 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1146 {
1147 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
1148 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
1149 pVCpu->iem.s.uRexIndex = 1 << 3;
1150
1151 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1152 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1153 }
1154
1155 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
1156 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
1157}
1158
1159
1160
1161/**
1162 * @opcode 0x43
1163 */
1164FNIEMOP_DEF(iemOp_inc_eBX)
1165{
1166 /*
1167 * This is a REX prefix in 64-bit mode.
1168 */
1169 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1170 {
1171 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
1172 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1173 pVCpu->iem.s.uRexB = 1 << 3;
1174 pVCpu->iem.s.uRexIndex = 1 << 3;
1175
1176 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1177 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1178 }
1179
1180 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
1181 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
1182}
1183
1184
1185/**
1186 * @opcode 0x44
1187 */
1188FNIEMOP_DEF(iemOp_inc_eSP)
1189{
1190 /*
1191 * This is a REX prefix in 64-bit mode.
1192 */
1193 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1194 {
1195 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
1196 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
1197 pVCpu->iem.s.uRexReg = 1 << 3;
1198
1199 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1200 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1201 }
1202
1203 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
1204 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
1205}
1206
1207
1208/**
1209 * @opcode 0x45
1210 */
1211FNIEMOP_DEF(iemOp_inc_eBP)
1212{
1213 /*
1214 * This is a REX prefix in 64-bit mode.
1215 */
1216 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1217 {
1218 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
1219 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
1220 pVCpu->iem.s.uRexReg = 1 << 3;
1221 pVCpu->iem.s.uRexB = 1 << 3;
1222
1223 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1224 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1225 }
1226
1227 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
1228 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
1229}
1230
1231
1232/**
1233 * @opcode 0x46
1234 */
1235FNIEMOP_DEF(iemOp_inc_eSI)
1236{
1237 /*
1238 * This is a REX prefix in 64-bit mode.
1239 */
1240 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1241 {
1242 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
1243 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
1244 pVCpu->iem.s.uRexReg = 1 << 3;
1245 pVCpu->iem.s.uRexIndex = 1 << 3;
1246
1247 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1248 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1249 }
1250
1251 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
1252 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
1253}
1254
1255
1256/**
1257 * @opcode 0x47
1258 */
1259FNIEMOP_DEF(iemOp_inc_eDI)
1260{
1261 /*
1262 * This is a REX prefix in 64-bit mode.
1263 */
1264 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1265 {
1266 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
1267 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1268 pVCpu->iem.s.uRexReg = 1 << 3;
1269 pVCpu->iem.s.uRexB = 1 << 3;
1270 pVCpu->iem.s.uRexIndex = 1 << 3;
1271
1272 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1273 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1274 }
1275
1276 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
1277 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
1278}
1279
1280
1281/**
1282 * @opcode 0x48
1283 */
1284FNIEMOP_DEF(iemOp_dec_eAX)
1285{
1286 /*
1287 * This is a REX prefix in 64-bit mode.
1288 */
1289 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1290 {
1291 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1292 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
1293 iemRecalEffOpSize(pVCpu);
1294
1295 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1296 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1297 }
1298
1299 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
1300 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
1301}
1302
1303
1304/**
1305 * @opcode 0x49
1306 */
1307FNIEMOP_DEF(iemOp_dec_eCX)
1308{
1309 /*
1310 * This is a REX prefix in 64-bit mode.
1311 */
1312 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1313 {
1314 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
1315 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1316 pVCpu->iem.s.uRexB = 1 << 3;
1317 iemRecalEffOpSize(pVCpu);
1318
1319 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1320 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1321 }
1322
1323 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
1324 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
1325}
1326
1327
1328/**
1329 * @opcode 0x4a
1330 */
1331FNIEMOP_DEF(iemOp_dec_eDX)
1332{
1333 /*
1334 * This is a REX prefix in 64-bit mode.
1335 */
1336 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1337 {
1338 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
1339 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1340 pVCpu->iem.s.uRexIndex = 1 << 3;
1341 iemRecalEffOpSize(pVCpu);
1342
1343 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1344 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1345 }
1346
1347 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
1348 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
1349}
1350
1351
1352/**
1353 * @opcode 0x4b
1354 */
1355FNIEMOP_DEF(iemOp_dec_eBX)
1356{
1357 /*
1358 * This is a REX prefix in 64-bit mode.
1359 */
1360 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1361 {
1362 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
1363 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1364 pVCpu->iem.s.uRexB = 1 << 3;
1365 pVCpu->iem.s.uRexIndex = 1 << 3;
1366 iemRecalEffOpSize(pVCpu);
1367
1368 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1369 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1370 }
1371
1372 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
1373 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
1374}
1375
1376
1377/**
1378 * @opcode 0x4c
1379 */
1380FNIEMOP_DEF(iemOp_dec_eSP)
1381{
1382 /*
1383 * This is a REX prefix in 64-bit mode.
1384 */
1385 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1386 {
1387 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
1388 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
1389 pVCpu->iem.s.uRexReg = 1 << 3;
1390 iemRecalEffOpSize(pVCpu);
1391
1392 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1393 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1394 }
1395
1396 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
1397 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
1398}
1399
1400
1401/**
1402 * @opcode 0x4d
1403 */
1404FNIEMOP_DEF(iemOp_dec_eBP)
1405{
1406 /*
1407 * This is a REX prefix in 64-bit mode.
1408 */
1409 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1410 {
1411 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
1412 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1413 pVCpu->iem.s.uRexReg = 1 << 3;
1414 pVCpu->iem.s.uRexB = 1 << 3;
1415 iemRecalEffOpSize(pVCpu);
1416
1417 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1418 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1419 }
1420
1421 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
1422 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
1423}
1424
1425
1426/**
1427 * @opcode 0x4e
1428 */
1429FNIEMOP_DEF(iemOp_dec_eSI)
1430{
1431 /*
1432 * This is a REX prefix in 64-bit mode.
1433 */
1434 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1435 {
1436 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
1437 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1438 pVCpu->iem.s.uRexReg = 1 << 3;
1439 pVCpu->iem.s.uRexIndex = 1 << 3;
1440 iemRecalEffOpSize(pVCpu);
1441
1442 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1443 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1444 }
1445
1446 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
1447 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
1448}
1449
1450
1451/**
1452 * @opcode 0x4f
1453 */
1454FNIEMOP_DEF(iemOp_dec_eDI)
1455{
1456 /*
1457 * This is a REX prefix in 64-bit mode.
1458 */
1459 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1460 {
1461 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
1462 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1463 pVCpu->iem.s.uRexReg = 1 << 3;
1464 pVCpu->iem.s.uRexB = 1 << 3;
1465 pVCpu->iem.s.uRexIndex = 1 << 3;
1466 iemRecalEffOpSize(pVCpu);
1467
1468 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1469 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1470 }
1471
1472 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
1473 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
1474}
1475
1476
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode this merges the REX.B bit into @a iReg and forces the
 * stack-op default operand size to 64 bits (o16 still selects 16 bits);
 * it then pushes the register with the effective operand size.
 *
 * @param   iReg    General register index (X86_GREG_XXX) before REX.B.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1522
1523
1524/**
1525 * @opcode 0x50
1526 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    /* All decoding and size handling (incl. 64-bit REX.B / default op-size) is in the common helper. */
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
1532
1533
1534/**
1535 * @opcode 0x51
1536 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    /* Common helper handles REX.B and operand-size selection. */
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
1542
1543
1544/**
1545 * @opcode 0x52
1546 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    /* Common helper handles REX.B and operand-size selection. */
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
1552
1553
1554/**
1555 * @opcode 0x53
1556 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    /* Common helper handles REX.B and operand-size selection. */
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1562
1563
1564/**
1565 * @opcode 0x54
1566 */
1567FNIEMOP_DEF(iemOp_push_eSP)
1568{
1569 IEMOP_MNEMONIC(push_rSP, "push rSP");
1570 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
1571 {
1572 IEM_MC_BEGIN(0, 1);
1573 IEM_MC_LOCAL(uint16_t, u16Value);
1574 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
1575 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
1576 IEM_MC_PUSH_U16(u16Value);
1577 IEM_MC_ADVANCE_RIP();
1578 IEM_MC_END();
1579 }
1580 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
1581}
1582
1583
1584/**
1585 * @opcode 0x55
1586 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    /* Common helper handles REX.B and operand-size selection. */
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
1592
1593
1594/**
1595 * @opcode 0x56
1596 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    /* Common helper handles REX.B and operand-size selection. */
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
1602
1603
1604/**
1605 * @opcode 0x57
1606 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    /* Common helper handles REX.B and operand-size selection. */
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1612
1613
/**
 * Common 'pop register' helper.
 *
 * In 64-bit mode this merges the REX.B bit into @a iReg and forces the
 * stack-op default operand size to 64 bits (o16 still selects 16 bits);
 * it then pops into the register with the effective operand size.
 *
 * @param   iReg    General register index (X86_GREG_XXX) before REX.B.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1660
1661
1662/**
1663 * @opcode 0x58
1664 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    /* Common helper handles REX.B and operand-size selection. */
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
1670
1671
1672/**
1673 * @opcode 0x59
1674 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    /* Common helper handles REX.B and operand-size selection. */
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
1680
1681
1682/**
1683 * @opcode 0x5a
1684 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    /* Common helper handles REX.B and operand-size selection. */
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
1690
1691
1692/**
1693 * @opcode 0x5b
1694 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    /* Common helper handles REX.B and operand-size selection. */
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
1700
1701
/**
 * @opcode 0x5c
 *
 * POP rSP is special-cased: popping into the stack pointer itself must use
 * the value read from the old top of stack, so the pop is done into a local
 * and then stored into SP/ESP/RSP explicitly.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this opcode addresses R12, which needs no special
           treatment, so defer to the common pop helper (it merges uRexB). */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1751
1752
1753/**
1754 * @opcode 0x5d
1755 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    /* Common helper handles REX.B and operand-size selection. */
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1761
1762
1763/**
1764 * @opcode 0x5e
1765 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    /* Common helper handles REX.B and operand-size selection. */
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1771
1772
1773/**
1774 * @opcode 0x5f
1775 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    /* Common helper handles REX.B and operand-size selection. */
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1781
1782
1783/**
1784 * @opcode 0x60
1785 */
FNIEMOP_DEF(iemOp_pusha)
{
    /* PUSHA/PUSHAD - push all general registers; 186+, not valid in 64-bit
       mode (IEMOP_HLP_NO_64BIT rejects that case).  The heavy lifting is
       deferred to the C implementation matching the effective operand size. */
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT); /* 64-bit was excluded above */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1796
1797
1798/**
1799 * @opcode 0x61
1800 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* Opcode 0x61: POPA/POPAD in legacy/compat modes; in 64-bit mode the
       byte is the (unsupported) MVEX prefix and raises #UD. */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1817
1818
1819/**
1820 * @opcode 0x62
1821 * @opmnemonic bound
1822 * @op1 Gv
1823 * @op2 Ma
1824 * @opmincpu 80186
1825 * @ophints harmless invalid_64
1826 * @optest op1=0 op2=0 ->
1827 * @optest op1=1 op2=0 -> value.xcpt=5
1828 * @optest o16 / op1=0xffff op2=0x0000fffe ->
1829 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
1830 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
1831 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
1832 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
1833 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
1834 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
1835 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
1836 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
1837 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
1838 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
1839 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
1840 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
1841 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
1842 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
1843 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
1844 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
1845 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
1846 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
1847 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
1848 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
1849 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
1850 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
1851 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
1852 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
1853 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
1854 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
1855 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
1856 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
1857 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
1858 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
1859 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
1860 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
1861 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
1862 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
1863 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
1864 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
1865 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
1866 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
1867 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
1868 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
1869 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
1870 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            /** @todo testcase: check that there are two memory accesses involved.  Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* The two bounds are adjacent words in memory. */
                IEM_MC_FETCH_GREG_U16(u16Index, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* The two bounds are adjacent dwords in memory. */
                IEM_MC_FETCH_GREG_U32(u32Index, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD=3 in legacy/compat mode: EVEX prefix if AVX-512 is exposed, #UD otherwise. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX decoding: consume the remaining two payload bytes; not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1958
1959
/** Opcode 0x63 - non-64-bit modes: ARPL Ew,Gw (adjust RPL field of selector).
 *  In 64-bit mode this opcode is MOVSXD instead (see iemOp_movsxd_Gv_Ev).
 *  286+, invalid in real and V86 mode. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* No REX here (non-64-bit only), so the raw MODRM fields suffice. */
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory - read/modify/write of the destination word. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
2009
2010
/**
 * @opcode 0x63
 *
 * MOVSXD Gv,Ev - 64-bit mode only (non-64-bit 0x63 is ARPL, see above).
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register: sign-extend the 32-bit source register
         * into the 64-bit destination register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         * (NB: GCPtrEffDst actually addresses the memory *source* here.)
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2054
2055
2056/**
2057 * @opcode 0x64
2058 * @opmnemonic segfs
2059 * @opmincpu 80386
2060 * @opgroup og_prefixes
2061 */
2062FNIEMOP_DEF(iemOp_seg_FS)
2063{
2064 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2065 IEMOP_HLP_MIN_386();
2066
2067 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2068 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2069
2070 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2071 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2072}
2073
2074
2075/**
2076 * @opcode 0x65
2077 * @opmnemonic seggs
2078 * @opmincpu 80386
2079 * @opgroup og_prefixes
2080 */
2081FNIEMOP_DEF(iemOp_seg_GS)
2082{
2083 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2084 IEMOP_HLP_MIN_386();
2085
2086 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2087 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2088
2089 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2090 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2091}
2092
2093
2094/**
2095 * @opcode 0x66
2096 * @opmnemonic opsize
2097 * @openc prefix
2098 * @opmincpu 80386
2099 * @ophints harmless
2100 * @opgroup og_prefixes
2101 */
FNIEMOP_DEF(iemOp_op_size)
{
    /* Operand-size override prefix (0x66): flip the effective operand size
       and continue decoding at the next byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2118
2119
2120/**
2121 * @opcode 0x67
2122 * @opmnemonic addrsize
2123 * @openc prefix
2124 * @opmincpu 80386
2125 * @ophints harmless
2126 * @opgroup og_prefixes
2127 */
2128FNIEMOP_DEF(iemOp_addr_size)
2129{
2130 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2131 IEMOP_HLP_MIN_386();
2132
2133 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2134 switch (pVCpu->iem.s.enmDefAddrMode)
2135 {
2136 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2137 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2138 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2139 default: AssertFailed();
2140 }
2141
2142 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2143 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2144}
2145
2146
/**
 * @opcode 0x68
 *
 * PUSH Iz - push an immediate of operand size (186+).  In 64-bit mode the
 * immediate is 32 bits, sign-extended to 64 (see the S32_SX_U64 fetch).
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* 32-bit immediate, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2193
2194
/**
 * @opcode 0x69
 * @opmincpu 80186
 *
 * IMUL Gv,Ev,Iz - three-operand signed multiply with a full-size immediate.
 * SF, ZF, AF and PF are left undefined by the hardware (see the
 * IEMOP_VERIFICATION_UNDEFINED_EFLAGS call below).
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply in a local and store to the Gv (reg field) register. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = immediate bytes still to come */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = immediate bytes still to come */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                /* Immediate is 32 bits on the wire, sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = immediate bytes still to come */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
2356
2357
/**
 * @opcode 0x6a
 * @opmincpu 80186
 *
 * PUSH Ib - push a sign-extended byte immediate.  The int8_t immediate is
 * sign-extended to the effective operand size by the push macros.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2386
2387
/**
 * @opcode 0x6b
 * @opmincpu 80186
 *
 * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended byte
 * immediate.  SF, ZF, AF and PF are left undefined by the hardware (see the
 * IEMOP_VERIFICATION_UNDEFINED_EFLAGS call below).
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply in a local and store to the Gv (reg field) register. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
2543
2544
/**
 * @opcode 0x6c
 * @opmincpu 80186
 *
 * INS Yb,DX - input byte(s) from port DX.  Defers to the C implementation;
 * the REP/REPNE prefixed form uses the iemCImpl_rep_ins_* workers, selected
 * by the effective address mode.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2575
2576
/**
 * @opcode 0x6d
 * @opmincpu 80186
 *
 * INS Yv,DX - input word/dword(s) from port DX.  Defers to the C
 * implementation selected by the effective operand and address sizes.
 * Note that the 64-bit operand size shares the 32-bit workers (the
 * IEMMODE_64BIT case label falls into IEMMODE_32BIT below).
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2639
2640
/**
 * @opcode 0x6e
 * @opmincpu 80186
 *
 * OUTS DX,Yb - output byte(s) to port DX.  Defers to the C implementation,
 * passing the effective segment (DS unless overridden) for the source.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2671
2672
/**
 * @opcode 0x6f
 * @opmincpu 80186
 *
 * OUTS DX,Yv - output word/dword(s) to port DX.  Defers to the C
 * implementation selected by the effective operand and address sizes,
 * passing the effective segment for the source.  The 64-bit operand size
 * shares the 32-bit workers (the IEMMODE_64BIT case label falls into
 * IEMMODE_32BIT below).
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2735
2736
/**
 * @opcode 0x70
 *
 * JO Jb - jump short if overflow (OF set).
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2756
2757
/**
 * @opcode 0x71
 *
 * JNO Jb - jump short if not overflow (OF clear); note the inverted arms.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2777
/**
 * @opcode 0x72
 *
 * JC/JB/JNAE Jb - jump short if carry (CF set).
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2797
2798
/**
 * @opcode 0x73
 *
 * JNC/JNB/JAE Jb - jump short if not carry (CF clear); note the inverted arms.
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2818
2819
/**
 * @opcode 0x74
 *
 * JE/JZ Jb - jump short if equal/zero (ZF set).
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2839
2840
/**
 * @opcode 0x75
 *
 * JNE/JNZ Jb - jump short if not equal/not zero (ZF clear); inverted arms.
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2860
2861
/**
 * @opcode 0x76
 *
 * JBE/JNA Jb - jump short if below or equal (CF or ZF set).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2881
2882
/**
 * @opcode 0x77
 *
 * JA/JNBE Jb - jump short if above (CF and ZF both clear); inverted arms.
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2902
2903
/**
 * @opcode 0x78
 *
 * JS Jb - jump short if sign (SF set).
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2923
2924
/**
 * @opcode 0x79
 *
 * JNS Jb - jump short if not sign (SF clear); inverted arms.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2944
2945
/**
 * @opcode 0x7a
 *
 * JP/JPE Jb - jump short if parity (PF set).
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2965
2966
/**
 * @opcode 0x7b
 *
 * JNP/JPO Jb - jump short if not parity (PF clear); inverted arms.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2986
2987
/**
 * @opcode 0x7c
 *
 * JL/JNGE Jb - jump short if less (SF != OF).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3007
3008
/**
 * @opcode 0x7d
 *
 * JNL/JGE Jb - jump short if not less (SF == OF); inverted arms.
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3028
3029
/**
 * @opcode 0x7e
 *
 * JLE/JNG Jb - jump short if less or equal (ZF set, or SF != OF).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3049
3050
/**
 * @opcode 0x7f
 *
 * JG/JNLE Jb - jump short if greater (ZF clear and SF == OF); inverted arms.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3070
3071
/**
 * @opcode 0x80
 *
 * Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib - the ModRM reg field
 * selects the operation via the g_apIemImplGrp1 table.  A LOCK prefix is
 * accepted for the memory forms of all operations except CMP (which has no
 * locked worker and only needs read access).
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* CMP has no locked worker (pfnLockedU8 is NULL) and only reads. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        /* Only reject a LOCK prefix for operations without a locked worker. */
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3143
3144
/**
 * @opcode 0x81
 *
 * Group 1 with a full-sized immediate (Iz): ADD, OR, ADC, SBB, AND, SUB,
 * XOR or CMP on Ev.  The ModR/M reg field selects the operation; in 64-bit
 * operand size the 32-bit immediate is sign-extended to 64 bits.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    /* The reg field of the ModR/M byte picks the worker set. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP - the only group 1 member without a locked variant */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 2 = size of the immediate still to be fetched (matters for RIP-relative). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP - the only group 1 member without a locked variant */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 = size of the immediate still to be fetched (matters for RIP-relative). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* Iz is a 32-bit immediate, sign-extended to 64 bits here. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 = size of the (32-bit) immediate still to be fetched. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
3334
3335
/**
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup og_groups
 *
 * Undocumented alias of opcode 0x80 (group 1 Eb,Ib); invalid in 64-bit mode,
 * where it raises \#UD.  Forwards to the 0x80 decoder otherwise.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
3346
3347
/**
 * @opcode 0x83
 *
 * Group 1 with a sign-extended byte immediate (Ib): ADD, OR, ADC, SBB, AND,
 * SUB, XOR or CMP on Ev.  The byte immediate is sign-extended to the
 * effective operand size before the operation.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* Sign-extend the byte immediate to 16 bits. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                /* Sign-extend the byte immediate to 32 bits. */
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                /* Sign-extend the byte immediate to 64 bits. */
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* Only CMP lacks a locked worker; checking the 16-bit entry suffices
           here since all sizes of one operation share lockedness -
           NOTE(review): presumably, verify against g_apIemImplGrp1. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 1 = size of the byte immediate still to be fetched. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
3532
3533
/**
 * @opcode 0x84
 *
 * TEST Eb,Gb - byte AND without storing the result; common r/m,r8 helper
 * does the work.  AF is left undefined, matching real hardware.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
3543
3544
/**
 * @opcode 0x85
 *
 * TEST Ev,Gv - word/dword/qword AND without storing the result; common
 * r/m,reg helper does the work.  AF is left undefined, matching hardware.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
3554
3555
/**
 * @opcode 0x86
 *
 * XCHG Eb,Gb - exchange a byte register with a byte register or memory.
 * The memory form is implicitly locked on real hardware; here the swap is
 * done through an assembly worker on the mapped memory.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Register-register form: fetch both, store them crosswise. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3605
3606
/**
 * @opcode 0x87
 *
 * XCHG Ev,Gv - exchange a word/dword/qword register with a register or
 * memory operand.  The memory form swaps through an assembly worker on the
 * mapped memory.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                /* Note: the 32-bit stores implicitly clear the high halves. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The register was written thru a reference, so clear the
                   high half explicitly here. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3730
3731
/**
 * @opcode 0x88
 *
 * MOV Eb,Gb - store a byte register to a byte register or memory operand.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
3773
3774
/**
 * @opcode 0x89
 *
 * MOV Ev,Gv - store a word/dword/qword register to a register or memory
 * operand, sized by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3866
3867
/**
 * @opcode 0x8a
 *
 * MOV Gb,Eb - load a byte register from a byte register or memory operand.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3907
3908
/**
 * @opcode 0x8b
 *
 * MOV Gv,Ev - load a word/dword/qword register from a register or memory
 * operand, sized by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
4000
4001
4002/**
4003 * opcode 0x63
4004 * @todo Table fixme
4005 */
4006FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
4007{
4008 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
4009 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
4010 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
4011 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
4012 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
4013}
4014
4015
/**
 * @opcode 0x8c
 *
 * MOV Ev,Sw - store a segment register to a general register or memory.
 * The register form respects the operand size (zero-extending); the memory
 * form always stores 16 bits.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if ( iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                /* Zero-extend the 16-bit selector to the operand size. */
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4091
4092
4093
4094
/**
 * @opcode 0x8d
 *
 * LEA Gv,M - store the effective address of the memory operand in Gv,
 * truncated to the operand size.  A register operand raises \#UD.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Truncate the effective address to 16 bits. */
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Truncate the effective address to 32 bits. */
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
4143
4144
/**
 * @opcode 0x8e
 *
 * MOV Sw,Ev - load a segment register from a general register or memory.
 * Always a 16-bit access; loading CS (or an out-of-range sreg) raises \#UD.
 * The actual load goes through a C implementation since it can fault and
 * has mode-dependent side effects.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if ( iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory. The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4201
4202
/**
 * Opcode 0x8f /0 - 'pop Ev'.
 *
 * Pops a word/dword/qword off the stack and stores it at the destination
 * given by the r/m operand.  Note the Intel quirk that RSP is incremented
 * BEFORE the effective address is calculated (relevant when the memory
 * operand addresses via rSP).
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP.  The last argument to
       iemOpHlpCalcRmEffAddrEx is the rSP adjustment (2/4/8 bytes popped). */
/** @todo testcase */
    PCPUMCTX        pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl.  Work on a copy of rSP so
       nothing is committed unless both the pop and the store succeed. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Only commit the updated rSP and advance RIP if everything succeeded. */
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4297
4298
/**
 * @opcode      0x8f
 *
 * Dispatches between 'pop Ev' (modrm.reg == 0) and the AMD XOP prefix
 * (modrm.reg != 0).
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* XOP may not be combined with legacy 66/F2/F3/LOCK or REX prefixes. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if (bXop2 & 0x80 /* XOP.W */)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* XOP byte 1 holds inverted R/X/B bits and the mmmmm opcode-map
               selector; byte 2 holds W, inverted vvvv, L and pp. */
            pVCpu->iem.s.uRexReg    = ~bRm >> (7 - 3);
            pVCpu->iem.s.uRexIndex  = ~bRm >> (6 - 3);
            pVCpu->iem.s.uRexB      = ~bRm >> (5 - 3);
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f)
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
4361
4362
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Exchanges the general register selected by the low opcode bits (plus
 * REX.B, applied below) with rAX, at the current effective operand size.
 *
 * @param   iReg    The low three bits of the register index (from the opcode).
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Fold in the REX.B extension to address r8..r15. */
    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4412
4413
4414/**
4415 * @opcode 0x90
4416 */
4417FNIEMOP_DEF(iemOp_nop)
4418{
4419 /* R8/R8D and RAX/EAX can be exchanged. */
4420 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4421 {
4422 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4423 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4424 }
4425
4426 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4427 IEMOP_MNEMONIC(pause, "pause");
4428 else
4429 IEMOP_MNEMONIC(nop, "nop");
4430 IEM_MC_BEGIN(0, 0);
4431 IEM_MC_ADVANCE_RIP();
4432 IEM_MC_END();
4433 return VINF_SUCCESS;
4434}
4435
4436
/**
 * @opcode      0x91
 *
 * 'xchg rCX,rAX' - shared helper handles REX.B and all operand sizes.
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
4445
4446
/**
 * @opcode      0x92
 *
 * 'xchg rDX,rAX' - shared helper handles REX.B and all operand sizes.
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
4455
4456
/**
 * @opcode      0x93
 *
 * 'xchg rBX,rAX' - shared helper handles REX.B and all operand sizes.
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
4465
4466
4467/**
4468 * @opcode 0x94
4469 */
4470FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4471{
4472 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4473 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4474}
4475
4476
/**
 * @opcode      0x95
 *
 * 'xchg rBP,rAX' - shared helper handles REX.B and all operand sizes.
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
4485
4486
/**
 * @opcode      0x96
 *
 * 'xchg rSI,rAX' - shared helper handles REX.B and all operand sizes.
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
4495
4496
/**
 * @opcode      0x97
 *
 * 'xchg rDI,rAX' - shared helper handles REX.B and all operand sizes.
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
4505
4506
/**
 * @opcode      0x98
 *
 * cbw/cwde/cdqe - sign extend AL into AX / AX into EAX / EAX into RAX,
 * depending on the effective operand size.  Implemented by testing the sign
 * bit and then OR-ing in (negative) or AND-ing away (non-negative) the upper
 * half of rAX.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                /* AL is negative: set AH to 0xff. */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                /* AL is non-negative: clear AH. */
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4554
4555
/**
 * @opcode      0x99
 *
 * cwd/cdq/cqo - sign extend rAX into rDX:rAX by filling rDX with all ones or
 * all zeros depending on the sign bit of rAX, at the effective operand size.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4603
4604
/**
 * @opcode      0x9a
 *
 * 'call Ap' - direct far call with an immediate seg:offset pointer.  Invalid
 * in 64-bit mode; the heavy lifting is deferred to iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation.
       The offset is 16 or 32 bits wide depending on the operand size; the
       selector is always 16 bits and follows the offset. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
4623
4624
/**
 * Opcode 0x9b.  (aka fwait)
 *
 * Checks for pending FPU exceptions / device-not-available conditions and
 * otherwise does nothing.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4638
4639
/**
 * @opcode      0x9c
 *
 * 'pushf' - push the flags register; deferred to the iemCImpl_pushf C
 * implementation.  Defaults to 64-bit operand size in long mode.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
4649
4650
/**
 * @opcode      0x9d
 *
 * 'popf' - pop into the flags register; deferred to the iemCImpl_popf C
 * implementation (which handles IOPL and mode-dependent flag masking).
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
4660
4661
/**
 * @opcode      0x9e
 *
 * 'sahf' - store AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF).
 * Invalid in 64-bit mode unless the CPU reports the LAHF/SAHF capability.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* AH is encoded as register index 4 (xSP) in the legacy high-byte scheme. */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the status flags SAHF may modify and force the reserved bit 1. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4686
4687
/**
 * @opcode      0x9f
 *
 * 'lahf' - load the low byte of EFLAGS into AH.  Invalid in 64-bit mode
 * unless the CPU reports the LAHF/SAHF capability.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* AH is encoded as register index 4 (xSP) in the legacy high-byte scheme. */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4706
4707
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
 * prefixes.  Will return on failures.
 *
 * The moffs immediate width follows the effective address mode (16/32/64-bit)
 * and is zero extended to 64 bits.
 *
 * @param   a_GCPtrMemOff   The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
4732
/**
 * @opcode      0xa0
 *
 * 'mov AL,Ob' - load AL from the memory offset given as a moffs immediate.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4755
4756
/**
 * @opcode      0xa1
 *
 * 'mov rAX,Ov' - load rAX from the memory offset given as a moffs immediate,
 * at the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4804
4805
/**
 * @opcode      0xa2
 *
 * 'mov Ob,AL' - store AL at the memory offset given as a moffs immediate.
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4828
4829
/**
 * @opcode      0xa3
 *
 * 'mov Ov,rAX' - store rAX at the memory offset given as a moffs immediate,
 * at the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4876
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-repeated movs step: load from DS(iEffSeg):rSI, store to
 * ES:rDI, then advance (DF clear) or retreat (DF set) both index registers
 * by the element size.  Addresses are zero extended to 64 bits from the
 * given address width. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
4895
/**
 * @opcode      0xa4
 *
 * 'movs Xb,Yb' - byte string move.  REP/REPNE variants are handed off to the
 * C implementations; the single-step case uses IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4931
4932
/**
 * @opcode      0xa5
 *
 * 'movs Xv,Yv' - word/dword/qword string move.  REP variants are handed off
 * to C implementations selected by operand and address size; the single-step
 * case uses IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no break here; unreachable anyway as every inner case returns. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 64-bit op with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5017
5018#undef IEM_MOVS_CASE
5019
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-repeated cmps step: load from DS(iEffSeg):rSI and ES:rDI,
 * compare via iemAImpl_cmp (updating EFLAGS only; the first operand local is
 * not written back), then advance or retreat rSI/rDI by the element size
 * according to DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
5046
/**
 * @opcode      0xa6
 *
 * 'cmps Xb,Yb' - byte string compare.  REPE/REPNE variants are handed off to
 * C implementations; the single-step case uses IEM_CMPS_CASE.  When both
 * repeat prefixes are present the REPZ check wins (checked first).
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
5094
5095
/**
 * @opcode      0xa7
 *
 * 'cmps Xv,Yv' - word/dword/qword string compare.  REPE/REPNE variants are
 * handed off to C implementations selected by operand and address size; the
 * single-step case uses IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no break here; unreachable anyway as every inner case returns. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 64-bit op with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no break here; unreachable anyway as every inner case returns. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 64-bit op with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
5216
5217#undef IEM_CMPS_CASE
5218
5219/**
5220 * @opcode 0xa8
5221 */
5222FNIEMOP_DEF(iemOp_test_AL_Ib)
5223{
5224 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
5225 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5226 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
5227}
5228
5229
5230/**
5231 * @opcode 0xa9
5232 */
5233FNIEMOP_DEF(iemOp_test_eAX_Iz)
5234{
5235 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
5236 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5237 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
5238}
5239
5240
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX for the
 * non-repeating STOS forms.
 *
 * Emits microcode that fetches the low ValBits bits of rAX and the
 * AddrBits-wide xDI (zero extended to 64 bits), stores the value at
 * ES:xDI, and then advances xDI by ValBits/8 - downwards when EFLAGS.DF
 * is set, upwards otherwise.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
5256
5257/**
5258 * @opcode 0xaa
5259 */
5260FNIEMOP_DEF(iemOp_stosb_Yb_AL)
5261{
5262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5263
5264 /*
5265 * Use the C implementation if a repeat prefix is encountered.
5266 */
5267 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5268 {
5269 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
5270 switch (pVCpu->iem.s.enmEffAddrMode)
5271 {
5272 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
5273 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
5274 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
5275 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5276 }
5277 }
5278 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
5279
5280 /*
5281 * Sharing case implementation with stos[wdq] below.
5282 */
5283 switch (pVCpu->iem.s.enmEffAddrMode)
5284 {
5285 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
5286 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
5287 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
5288 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5289 }
5290 return VINF_SUCCESS;
5291}
5292
5293
5294/**
5295 * @opcode 0xab
5296 */
5297FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
5298{
5299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5300
5301 /*
5302 * Use the C implementation if a repeat prefix is encountered.
5303 */
5304 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5305 {
5306 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
5307 switch (pVCpu->iem.s.enmEffOpSize)
5308 {
5309 case IEMMODE_16BIT:
5310 switch (pVCpu->iem.s.enmEffAddrMode)
5311 {
5312 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
5313 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
5314 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
5315 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5316 }
5317 break;
5318 case IEMMODE_32BIT:
5319 switch (pVCpu->iem.s.enmEffAddrMode)
5320 {
5321 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
5322 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
5323 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
5324 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5325 }
5326 case IEMMODE_64BIT:
5327 switch (pVCpu->iem.s.enmEffAddrMode)
5328 {
5329 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
5330 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
5331 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
5332 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5333 }
5334 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5335 }
5336 }
5337 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
5338
5339 /*
5340 * Annoying double switch here.
5341 * Using ugly macro for implementing the cases, sharing it with stosb.
5342 */
5343 switch (pVCpu->iem.s.enmEffOpSize)
5344 {
5345 case IEMMODE_16BIT:
5346 switch (pVCpu->iem.s.enmEffAddrMode)
5347 {
5348 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
5349 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
5350 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
5351 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5352 }
5353 break;
5354
5355 case IEMMODE_32BIT:
5356 switch (pVCpu->iem.s.enmEffAddrMode)
5357 {
5358 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
5359 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
5360 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
5361 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5362 }
5363 break;
5364
5365 case IEMMODE_64BIT:
5366 switch (pVCpu->iem.s.enmEffAddrMode)
5367 {
5368 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5369 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
5370 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
5371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5372 }
5373 break;
5374 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5375 }
5376 return VINF_SUCCESS;
5377}
5378
5379#undef IEM_STOS_CASE
5380
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv for the
 * non-repeating LODS forms.
 *
 * Emits microcode that fetches the AddrBits-wide xSI (zero extended to
 * 64 bits), loads a ValBits-wide value from iEffSeg:xSI into the low bits
 * of rAX, and then advances xSI by ValBits/8 - downwards when EFLAGS.DF
 * is set, upwards otherwise.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
5396
5397/**
5398 * @opcode 0xac
5399 */
5400FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
5401{
5402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5403
5404 /*
5405 * Use the C implementation if a repeat prefix is encountered.
5406 */
5407 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5408 {
5409 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
5410 switch (pVCpu->iem.s.enmEffAddrMode)
5411 {
5412 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
5413 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
5414 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
5415 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5416 }
5417 }
5418 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
5419
5420 /*
5421 * Sharing case implementation with stos[wdq] below.
5422 */
5423 switch (pVCpu->iem.s.enmEffAddrMode)
5424 {
5425 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
5426 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
5427 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
5428 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5429 }
5430 return VINF_SUCCESS;
5431}
5432
5433
5434/**
5435 * @opcode 0xad
5436 */
5437FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
5438{
5439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5440
5441 /*
5442 * Use the C implementation if a repeat prefix is encountered.
5443 */
5444 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5445 {
5446 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
5447 switch (pVCpu->iem.s.enmEffOpSize)
5448 {
5449 case IEMMODE_16BIT:
5450 switch (pVCpu->iem.s.enmEffAddrMode)
5451 {
5452 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
5453 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
5454 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
5455 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5456 }
5457 break;
5458 case IEMMODE_32BIT:
5459 switch (pVCpu->iem.s.enmEffAddrMode)
5460 {
5461 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
5462 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
5463 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
5464 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5465 }
5466 case IEMMODE_64BIT:
5467 switch (pVCpu->iem.s.enmEffAddrMode)
5468 {
5469 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
5470 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
5471 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
5472 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5473 }
5474 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5475 }
5476 }
5477 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
5478
5479 /*
5480 * Annoying double switch here.
5481 * Using ugly macro for implementing the cases, sharing it with lodsb.
5482 */
5483 switch (pVCpu->iem.s.enmEffOpSize)
5484 {
5485 case IEMMODE_16BIT:
5486 switch (pVCpu->iem.s.enmEffAddrMode)
5487 {
5488 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
5489 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
5490 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
5491 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5492 }
5493 break;
5494
5495 case IEMMODE_32BIT:
5496 switch (pVCpu->iem.s.enmEffAddrMode)
5497 {
5498 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
5499 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
5500 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
5501 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5502 }
5503 break;
5504
5505 case IEMMODE_64BIT:
5506 switch (pVCpu->iem.s.enmEffAddrMode)
5507 {
5508 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5509 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
5510 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
5511 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5512 }
5513 break;
5514 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5515 }
5516 return VINF_SUCCESS;
5517}
5518
5519#undef IEM_LODS_CASE
5520
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv for the
 * non-repeating SCAS forms.
 *
 * Emits microcode that fetches the AddrBits-wide xDI (zero extended to
 * 64 bits), loads a ValBits-wide value from ES:xDI, compares it against
 * rAX via the CMP assembly helper (updating EFLAGS only, rAX is passed by
 * reference but CMP does not write it), and then advances xDI by
 * ValBits/8 according to EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
5542
5543/**
5544 * @opcode 0xae
5545 */
5546FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5547{
5548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5549
5550 /*
5551 * Use the C implementation if a repeat prefix is encountered.
5552 */
5553 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5554 {
5555 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5556 switch (pVCpu->iem.s.enmEffAddrMode)
5557 {
5558 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5559 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5560 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5561 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5562 }
5563 }
5564 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5565 {
5566 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5567 switch (pVCpu->iem.s.enmEffAddrMode)
5568 {
5569 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5570 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5571 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5572 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5573 }
5574 }
5575 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5576
5577 /*
5578 * Sharing case implementation with stos[wdq] below.
5579 */
5580 switch (pVCpu->iem.s.enmEffAddrMode)
5581 {
5582 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5583 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5584 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5585 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5586 }
5587 return VINF_SUCCESS;
5588}
5589
5590
5591/**
5592 * @opcode 0xaf
5593 */
5594FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5595{
5596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5597
5598 /*
5599 * Use the C implementation if a repeat prefix is encountered.
5600 */
5601 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5602 {
5603 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5604 switch (pVCpu->iem.s.enmEffOpSize)
5605 {
5606 case IEMMODE_16BIT:
5607 switch (pVCpu->iem.s.enmEffAddrMode)
5608 {
5609 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5610 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5611 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5612 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5613 }
5614 break;
5615 case IEMMODE_32BIT:
5616 switch (pVCpu->iem.s.enmEffAddrMode)
5617 {
5618 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5619 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5620 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5621 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5622 }
5623 case IEMMODE_64BIT:
5624 switch (pVCpu->iem.s.enmEffAddrMode)
5625 {
5626 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
5627 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5628 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5629 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5630 }
5631 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5632 }
5633 }
5634 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5635 {
5636 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5637 switch (pVCpu->iem.s.enmEffOpSize)
5638 {
5639 case IEMMODE_16BIT:
5640 switch (pVCpu->iem.s.enmEffAddrMode)
5641 {
5642 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5643 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5644 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5645 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5646 }
5647 break;
5648 case IEMMODE_32BIT:
5649 switch (pVCpu->iem.s.enmEffAddrMode)
5650 {
5651 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5652 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5653 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5654 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5655 }
5656 case IEMMODE_64BIT:
5657 switch (pVCpu->iem.s.enmEffAddrMode)
5658 {
5659 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5660 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5661 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5663 }
5664 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5665 }
5666 }
5667 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5668
5669 /*
5670 * Annoying double switch here.
5671 * Using ugly macro for implementing the cases, sharing it with scasb.
5672 */
5673 switch (pVCpu->iem.s.enmEffOpSize)
5674 {
5675 case IEMMODE_16BIT:
5676 switch (pVCpu->iem.s.enmEffAddrMode)
5677 {
5678 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5679 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5680 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5681 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5682 }
5683 break;
5684
5685 case IEMMODE_32BIT:
5686 switch (pVCpu->iem.s.enmEffAddrMode)
5687 {
5688 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5689 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5690 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5691 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5692 }
5693 break;
5694
5695 case IEMMODE_64BIT:
5696 switch (pVCpu->iem.s.enmEffAddrMode)
5697 {
5698 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5699 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5700 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5701 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5702 }
5703 break;
5704 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5705 }
5706 return VINF_SUCCESS;
5707}
5708
5709#undef IEM_SCAS_CASE
5710
5711/**
5712 * Common 'mov r8, imm8' helper.
5713 */
5714FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
5715{
5716 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
5717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5718
5719 IEM_MC_BEGIN(0, 1);
5720 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
5721 IEM_MC_STORE_GREG_U8(iReg, u8Value);
5722 IEM_MC_ADVANCE_RIP();
5723 IEM_MC_END();
5724
5725 return VINF_SUCCESS;
5726}
5727
5728
5729/**
5730 * @opcode 0xb0
5731 */
5732FNIEMOP_DEF(iemOp_mov_AL_Ib)
5733{
5734 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
5735 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5736}
5737
5738
5739/**
5740 * @opcode 0xb1
5741 */
5742FNIEMOP_DEF(iemOp_CL_Ib)
5743{
5744 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
5745 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5746}
5747
5748
5749/**
5750 * @opcode 0xb2
5751 */
5752FNIEMOP_DEF(iemOp_DL_Ib)
5753{
5754 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
5755 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5756}
5757
5758
5759/**
5760 * @opcode 0xb3
5761 */
5762FNIEMOP_DEF(iemOp_BL_Ib)
5763{
5764 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
5765 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5766}
5767
5768
5769/**
5770 * @opcode 0xb4
5771 */
5772FNIEMOP_DEF(iemOp_mov_AH_Ib)
5773{
5774 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
5775 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5776}
5777
5778
5779/**
5780 * @opcode 0xb5
5781 */
5782FNIEMOP_DEF(iemOp_CH_Ib)
5783{
5784 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
5785 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5786}
5787
5788
5789/**
5790 * @opcode 0xb6
5791 */
5792FNIEMOP_DEF(iemOp_DH_Ib)
5793{
5794 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
5795 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5796}
5797
5798
5799/**
5800 * @opcode 0xb7
5801 */
5802FNIEMOP_DEF(iemOp_BH_Ib)
5803{
5804 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
5805 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5806}
5807
5808
5809/**
5810 * Common 'mov regX,immX' helper.
5811 */
5812FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5813{
5814 switch (pVCpu->iem.s.enmEffOpSize)
5815 {
5816 case IEMMODE_16BIT:
5817 {
5818 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5820
5821 IEM_MC_BEGIN(0, 1);
5822 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5823 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5824 IEM_MC_ADVANCE_RIP();
5825 IEM_MC_END();
5826 break;
5827 }
5828
5829 case IEMMODE_32BIT:
5830 {
5831 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5833
5834 IEM_MC_BEGIN(0, 1);
5835 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5836 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5837 IEM_MC_ADVANCE_RIP();
5838 IEM_MC_END();
5839 break;
5840 }
5841 case IEMMODE_64BIT:
5842 {
5843 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5845
5846 IEM_MC_BEGIN(0, 1);
5847 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5848 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5849 IEM_MC_ADVANCE_RIP();
5850 IEM_MC_END();
5851 break;
5852 }
5853 }
5854
5855 return VINF_SUCCESS;
5856}
5857
5858
5859/**
5860 * @opcode 0xb8
5861 */
5862FNIEMOP_DEF(iemOp_eAX_Iv)
5863{
5864 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
5865 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5866}
5867
5868
5869/**
5870 * @opcode 0xb9
5871 */
5872FNIEMOP_DEF(iemOp_eCX_Iv)
5873{
5874 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
5875 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5876}
5877
5878
5879/**
5880 * @opcode 0xba
5881 */
5882FNIEMOP_DEF(iemOp_eDX_Iv)
5883{
5884 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
5885 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5886}
5887
5888
5889/**
5890 * @opcode 0xbb
5891 */
5892FNIEMOP_DEF(iemOp_eBX_Iv)
5893{
5894 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
5895 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5896}
5897
5898
5899/**
5900 * @opcode 0xbc
5901 */
5902FNIEMOP_DEF(iemOp_eSP_Iv)
5903{
5904 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
5905 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5906}
5907
5908
5909/**
5910 * @opcode 0xbd
5911 */
5912FNIEMOP_DEF(iemOp_eBP_Iv)
5913{
5914 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5915 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5916}
5917
5918
5919/**
5920 * @opcode 0xbe
5921 */
5922FNIEMOP_DEF(iemOp_eSI_Iv)
5923{
5924 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
5925 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5926}
5927
5928
5929/**
5930 * @opcode 0xbf
5931 */
5932FNIEMOP_DEF(iemOp_eDI_Iv)
5933{
5934 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
5935 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5936}
5937
5938
5939/**
5940 * @opcode 0xc0
5941 */
5942FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
5943{
5944 IEMOP_HLP_MIN_186();
5945 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5946 PCIEMOPSHIFTSIZES pImpl;
5947 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5948 {
5949 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
5950 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
5951 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
5952 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
5953 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
5954 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
5955 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
5956 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5957 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
5958 }
5959 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
5960
5961 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5962 {
5963 /* register */
5964 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5966 IEM_MC_BEGIN(3, 0);
5967 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5968 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5969 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5970 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5971 IEM_MC_REF_EFLAGS(pEFlags);
5972 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
5973 IEM_MC_ADVANCE_RIP();
5974 IEM_MC_END();
5975 }
5976 else
5977 {
5978 /* memory */
5979 IEM_MC_BEGIN(3, 2);
5980 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5981 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5982 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5983 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5984
5985 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5986 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5987 IEM_MC_ASSIGN(cShiftArg, cShift);
5988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5989 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5990 IEM_MC_FETCH_EFLAGS(EFlags);
5991 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
5992
5993 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5994 IEM_MC_COMMIT_EFLAGS(EFlags);
5995 IEM_MC_ADVANCE_RIP();
5996 IEM_MC_END();
5997 }
5998 return VINF_SUCCESS;
5999}
6000
6001
6002/**
6003 * @opcode 0xc1
6004 */
6005FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
6006{
6007 IEMOP_HLP_MIN_186();
6008 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6009 PCIEMOPSHIFTSIZES pImpl;
6010 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6011 {
6012 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
6013 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
6014 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
6015 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
6016 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
6017 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
6018 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
6019 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6020 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6021 }
6022 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6023
6024 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6025 {
6026 /* register */
6027 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6029 switch (pVCpu->iem.s.enmEffOpSize)
6030 {
6031 case IEMMODE_16BIT:
6032 IEM_MC_BEGIN(3, 0);
6033 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6034 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6035 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6036 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6037 IEM_MC_REF_EFLAGS(pEFlags);
6038 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6039 IEM_MC_ADVANCE_RIP();
6040 IEM_MC_END();
6041 return VINF_SUCCESS;
6042
6043 case IEMMODE_32BIT:
6044 IEM_MC_BEGIN(3, 0);
6045 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6046 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6047 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6048 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6049 IEM_MC_REF_EFLAGS(pEFlags);
6050 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6051 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6052 IEM_MC_ADVANCE_RIP();
6053 IEM_MC_END();
6054 return VINF_SUCCESS;
6055
6056 case IEMMODE_64BIT:
6057 IEM_MC_BEGIN(3, 0);
6058 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6059 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6060 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6061 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6062 IEM_MC_REF_EFLAGS(pEFlags);
6063 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6064 IEM_MC_ADVANCE_RIP();
6065 IEM_MC_END();
6066 return VINF_SUCCESS;
6067
6068 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6069 }
6070 }
6071 else
6072 {
6073 /* memory */
6074 switch (pVCpu->iem.s.enmEffOpSize)
6075 {
6076 case IEMMODE_16BIT:
6077 IEM_MC_BEGIN(3, 2);
6078 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6079 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6080 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6081 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6082
6083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6084 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6085 IEM_MC_ASSIGN(cShiftArg, cShift);
6086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6087 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6088 IEM_MC_FETCH_EFLAGS(EFlags);
6089 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6090
6091 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6092 IEM_MC_COMMIT_EFLAGS(EFlags);
6093 IEM_MC_ADVANCE_RIP();
6094 IEM_MC_END();
6095 return VINF_SUCCESS;
6096
6097 case IEMMODE_32BIT:
6098 IEM_MC_BEGIN(3, 2);
6099 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6100 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6101 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6102 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6103
6104 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6105 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6106 IEM_MC_ASSIGN(cShiftArg, cShift);
6107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6108 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6109 IEM_MC_FETCH_EFLAGS(EFlags);
6110 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6111
6112 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6113 IEM_MC_COMMIT_EFLAGS(EFlags);
6114 IEM_MC_ADVANCE_RIP();
6115 IEM_MC_END();
6116 return VINF_SUCCESS;
6117
6118 case IEMMODE_64BIT:
6119 IEM_MC_BEGIN(3, 2);
6120 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6121 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6122 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6124
6125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6126 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6127 IEM_MC_ASSIGN(cShiftArg, cShift);
6128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6129 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6130 IEM_MC_FETCH_EFLAGS(EFlags);
6131 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6132
6133 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6134 IEM_MC_COMMIT_EFLAGS(EFlags);
6135 IEM_MC_ADVANCE_RIP();
6136 IEM_MC_END();
6137 return VINF_SUCCESS;
6138
6139 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6140 }
6141 }
6142}
6143
6144
6145/**
6146 * @opcode 0xc2
6147 */
6148FNIEMOP_DEF(iemOp_retn_Iw)
6149{
6150 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
6151 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6153 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6154 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
6155}
6156
6157
6158/**
6159 * @opcode 0xc3
6160 */
6161FNIEMOP_DEF(iemOp_retn)
6162{
6163 IEMOP_MNEMONIC(retn, "retn");
6164 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6166 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
6167}
6168
6169
6170/**
6171 * @opcode 0xc4
6172 */
6173FNIEMOP_DEF(iemOp_les_Gv_Mp__vex2)
6174{
6175 /* The LES instruction is invalid 64-bit mode. In legacy and
6176 compatability mode it is invalid with MOD=3.
6177 The use as a VEX prefix is made possible by assigning the inverted
6178 REX.R to the top MOD bit, and the top bit in the inverted register
6179 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
6180 to accessing registers 0..7 in this VEX form. */
6181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6182 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
6183 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6184 {
6185 IEMOP_MNEMONIC(vex2_prefix, "vex2");
6186 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6187 {
6188 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6189 if ( ( pVCpu->iem.s.fPrefixes
6190 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6191 == 0)
6192 {
6193 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
6194 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
6195 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
6196 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
6197 pVCpu->iem.s.idxPrefix = bRm & 0x3;
6198
6199 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6200 }
6201
6202 Log(("VEX2: Invalid prefix mix!\n"));
6203 }
6204 else
6205 Log(("VEX2: AVX support disabled!\n"));
6206
6207 /* @todo does intel completely decode the sequence with SIB/disp before \#UD? */
6208 return IEMOP_RAISE_INVALID_OPCODE();
6209 }
6210 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
6211 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
6212}
6213
6214
6215/**
6216 * @opcode 0xc5
6217 */
6218FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex3)
6219{
6220 /* The LDS instruction is invalid 64-bit mode. In legacy and
6221 compatability mode it is invalid with MOD=3.
6222 The use as a VEX prefix is made possible by assigning the inverted
6223 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
6224 outside of 64-bit mode. VEX is not available in real or v86 mode. */
6225 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6226 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
6227 {
6228 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6229 {
6230 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
6231 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
6232 }
6233 IEMOP_HLP_NO_REAL_OR_V86_MODE();
6234 }
6235
6236 IEMOP_MNEMONIC(vex3_prefix, "vex3");
6237 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6238 {
6239 /** @todo Test when exctly the VEX conformance checks kick in during
6240 * instruction decoding and fetching (using \#PF). */
6241 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
6242 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6243 if ( ( pVCpu->iem.s.fPrefixes
6244 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6245 == 0)
6246 {
6247 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
6248 if (bVex2 & 0x80 /* VEX.W */)
6249 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6250 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
6251 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
6252 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
6253 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
6254 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
6255 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
6256
6257 switch (bRm & 0x1f)
6258 {
6259 case 1: /* 0x0f lead opcode byte. */
6260 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6261
6262 case 2: /* 0x0f 0x38 lead opcode bytes. */
6263 /** @todo VEX: Just use new tables and decoders. */
6264 IEMOP_BITCH_ABOUT_STUB();
6265 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6266
6267 case 3: /* 0x0f 0x3a lead opcode bytes. */
6268 /** @todo VEX: Just use new tables and decoders. */
6269 IEMOP_BITCH_ABOUT_STUB();
6270 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6271
6272 default:
6273 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6274 return IEMOP_RAISE_INVALID_OPCODE();
6275 }
6276 }
6277 else
6278 Log(("VEX3: Invalid prefix mix!\n"));
6279 }
6280 else
6281 Log(("VEX3: AVX support disabled!\n"));
6282 return IEMOP_RAISE_INVALID_OPCODE();
6283}
6284
6285
/**
 * @opcode 0xc6
 *
 * Group 11: only /0 (mov Eb,Ib) is defined; every other /reg encoding
 * raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to be fetched (matches imm8). */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6320
6321
/**
 * @opcode 0xc7
 *
 * Group 11: only /0 (mov Ev,Iz) is defined; every other /reg encoding
 * raises \#UD.  The 64-bit form takes a sign-extended 32-bit immediate.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* imm32 sign-extended to 64 bits, per the usual 64-bit Iz rule. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = imm16 still to be fetched. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = imm32 still to be fetched. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* still 4: the immediate is imm32 even with 64-bit operand size. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6409
6410
6411
6412
/**
 * @opcode 0xc8
 *
 * ENTER: allocate cbFrame bytes of stack frame with u8NestingLevel nesting.
 * Requires a 186 or later; 64-bit default operand size in long mode.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
6426
6427
/**
 * @opcode 0xc9
 *
 * LEAVE: tear down the current stack frame (counterpart of ENTER).
 * Requires a 186 or later; 64-bit default operand size in long mode.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
6439
6440
/**
 * @opcode 0xca
 *
 * Far return, popping u16Imm bytes of caller arguments off the stack.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6452
6453
/**
 * @opcode 0xcb
 *
 * Plain far return (no stack-argument count).
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
6464
6465
/**
 * @opcode 0xcc
 *
 * INT3 breakpoint: raises \#BP, flagged as the dedicated breakpoint
 * instruction (affects how the exception is delivered).
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
6474
6475
/**
 * @opcode 0xcd
 *
 * Software interrupt: raises the vector given by the imm8 operand.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
6485
6486
/**
 * @opcode 0xce
 *
 * INTO: raise \#OF if EFLAGS.OF is set (the overflow check itself is done by
 * iemCImpl_int).  Not valid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    /* NOTE(review): unlike int3/int Ib above, this path does not invoke
       IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() - confirm whether a LOCK
       prefix should be rejected here as well. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6502
6503
/**
 * @opcode 0xcf
 *
 * IRET: return from interrupt, restoring flags (and possibly stack/CS) as
 * appropriate for the current mode; all done in iemCImpl_iret.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
6513
6514
/**
 * @opcode 0xd0
 *
 * Group 2 shift/rotate of a byte operand by a constant count of 1.
 * The /reg field selects the operation; /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF/AF results differ between CPUs for these ops; tell the verifier so. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6572
6573
6574
/**
 * @opcode 0xd1
 *
 * Group 2 shift/rotate of a word/dword/qword operand by a constant count
 * of 1.  The /reg field selects the operation; /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF/AF results differ between CPUs for these ops; tell the verifier so. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6708
6709
/**
 * @opcode 0xd2
 *
 * Group 2 shift/rotate of a byte operand by the count in CL.
 * The /reg field selects the operation; /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF/AF results differ between CPUs for these ops; tell the verifier so. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL. */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6769
6770
/**
 * @opcode 0xd3
 *
 * Group 2 shift/rotate of a word/dword/qword operand by the count in CL.
 * The /reg field selects the operation; /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF/AF results differ between CPUs for these ops; tell the verifier so. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL. */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL. */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL. */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL. */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL. */
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL. */
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6910
/**
 * @opcode 0xd4
 *
 * AAM with an explicit (usually 10) divisor byte; a zero divisor raises
 * \#DE before the C implementation runs.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
6924
6925
/**
 * @opcode 0xd5
 *
 * AAD with an explicit (usually 10) multiplier byte.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
6937
6938
6939/**
6940 * @opcode 0xd6
6941 */
6942FNIEMOP_DEF(iemOp_salc)
6943{
6944 IEMOP_MNEMONIC(salc, "salc");
6945 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
6946 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6948 IEMOP_HLP_NO_64BIT();
6949
6950 IEM_MC_BEGIN(0, 0);
6951 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6952 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
6953 } IEM_MC_ELSE() {
6954 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
6955 } IEM_MC_ENDIF();
6956 IEM_MC_ADVANCE_RIP();
6957 IEM_MC_END();
6958 return VINF_SUCCESS;
6959}
6960
6961
/**
 * @opcode 0xd7
 *
 * XLAT: AL = [rBX + zero-extended AL], using the effective address size to
 * pick the base register width and the memory-fetch variant.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr); /* 16-bit address fetch variant. */
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr); /* 32-bit address fetch variant. */
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7010
7011
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * @param   bRm         The ModR/M byte; the low three bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and STn must be occupied; otherwise record a stack underflow in ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7042
7043
/**
 * Common worker for FPU instructions working on ST0 and STn, and only
 * affecting flags (FSW) - nothing is stored back to the register stack.
 *
 * @param   bRm         The ModR/M byte; the low three bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and STn must be occupied; UINT8_MAX = no destination register for the underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7074
7075
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the stack when done.
 *
 * @param   bRm         The ModR/M byte; the low three bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and STn must be occupied; the stack is popped on both paths. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7106
7107
/** Opcode 0xd8 11/0.  fadd: ST0 = ST0 + STn. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
7114
7115
/** Opcode 0xd8 11/1.  fmul: ST0 = ST0 * STn. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
7122
7123
/** Opcode 0xd8 11/2.  fcom: compare ST0 with STn, setting FSW condition codes only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
7130
7131
/** Opcode 0xd8 11/3.  fcomp: like fcom but pops the stack afterwards
 *  (same assembly worker, pop handled by the helper). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
7138
7139
/** Opcode 0xd8 11/4.  fsub: ST0 = ST0 - STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
7146
7147
/** Opcode 0xd8 11/5.  fsubr: ST0 = STn - ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
7154
7155
/** Opcode 0xd8 11/6.  fdiv: ST0 = ST0 / STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
7162
7163
7164/** Opcode 0xd8 11/7. */
7165FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
7166{
7167 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
7168 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
7169}
7170
7171
7172/**
7173 * Common worker for FPU instructions working on ST0 and an m32r, and storing
7174 * the result in ST0.
7175 *
7176 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7177 */
7178FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
7179{
7180 IEM_MC_BEGIN(3, 3);
7181 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7182 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7183 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
7184 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7185 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7186 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
7187
7188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7190
7191 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7192 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7193 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7194
7195 IEM_MC_PREPARE_FPU_USAGE();
7196 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
7197 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
7198 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7199 IEM_MC_ELSE()
7200 IEM_MC_FPU_STACK_UNDERFLOW(0);
7201 IEM_MC_ENDIF();
7202 IEM_MC_ADVANCE_RIP();
7203
7204 IEM_MC_END();
7205 return VINF_SUCCESS;
7206}
7207
7208
/** Opcode 0xd8 !11/0. FADD ST0,m32real - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1. FMUL ST0,m32real - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
7223
7224
/** Opcode 0xd8 !11/2. FCOM ST0,m32real - compare; only FSW is updated, no
 *  value is stored. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Memory operand form: FDP/FDS are recorded along with the FSW update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xd8 !11/3. FCOMP ST0,m32real - identical comparison to FCOM
 *  (same assembly worker), but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Same as FCOM, but the _THEN_POP variants pop the stack. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7291
7292
/** Opcode 0xd8 !11/4. FSUB ST0,m32real - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5. FSUBR ST0,m32real - reversed operand order variant. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6. FDIV ST0,m32real - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7. FDIVR ST0,m32real - reversed operand order variant. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7323
7324
7325/**
7326 * @opcode 0xd8
7327 */
7328FNIEMOP_DEF(iemOp_EscF0)
7329{
7330 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7331 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
7332
7333 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7334 {
7335 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7336 {
7337 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
7338 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
7339 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
7340 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
7341 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
7342 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
7343 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
7344 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
7345 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7346 }
7347 }
7348 else
7349 {
7350 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7351 {
7352 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
7353 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
7354 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
7355 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
7356 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
7357 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
7358 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
7359 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
7360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7361 }
7362 }
7363}
7364
7365
/** Opcode 0xd9 /0 mem32real
 *
 * FLD m32real - converts the 32-bit real to 80-bit and pushes it onto the
 * FPU register stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 is the register that becomes the new top after the push; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7398
7399
/** Opcode 0xd9 !11/2 mem32real
 *
 * FST m32real - stores ST0 to memory as a 32-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: with IM masked, a negative QNaN indefinite is stored instead. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xd9 !11/3
 *
 * FSTP m32real - same store as FST m32real, but pops the stack afterwards
 * (the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: with IM masked, store a negative QNaN indefinite instead. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7470
7471
/** Opcode 0xd9 !11/4
 *
 * FLDENV m14/m28byte - loads the FPU environment from memory; the heavy
 * lifting (14 vs 28 byte layout depending on operand size) is done by
 * iemCImpl_fldenv. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7489
7490
7491/** Opcode 0xd9 !11/5 */
7492FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7493{
7494 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7495 IEM_MC_BEGIN(1, 1);
7496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7497 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7500 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7501 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7502 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7503 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7504 IEM_MC_END();
7505 return VINF_SUCCESS;
7506}
7507
7508
7509/** Opcode 0xd9 !11/6 */
7510FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
7511{
7512 IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
7513 IEM_MC_BEGIN(3, 0);
7514 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
7515 IEM_MC_ARG(uint8_t, iEffSeg, 1);
7516 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
7517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7519 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7520 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7521 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7522 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
7523 IEM_MC_END();
7524 return VINF_SUCCESS;
7525}
7526
7527
/** Opcode 0xd9 !11/7
 *
 * FNSTCW m2byte - stores the FPU control word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7545
7546
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 *
 * FNOP - does nothing except update the FPU opcode/instruction pointer. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7564
7565
/** Opcode 0xd9 11/0 stN
 *
 * FLD ST(i) - pushes a copy of ST(i) onto the register stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        /* Wrap the source value in a result with a zero FSW and push it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7593
7594
/** Opcode 0xd9 11/3 stN
 *
 * FXCH ST(i) - exchanges ST0 and ST(i); C1 is set via the FSW carried in
 * the result.  Underflow (either register empty) is handled by a CIMPL
 * worker. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: ST(i)'s value goes to ST0 (with C1 set), ST0's old value to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7625
7626
/** Opcode 0xd9 11/4, 0xdd 11/2.
 *
 * FSTP ST(i) - copies ST0 to ST(i) and pops; the iDstReg == 0 case is
 * special-cased as a plain pop (see note below). */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: just pop, no copy needed. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST0 into ST(iDstReg), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7673
7674
7675/**
7676 * Common worker for FPU instructions working on ST0 and replaces it with the
7677 * result, i.e. unary operators.
7678 *
7679 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7680 */
7681FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
7682{
7683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7684
7685 IEM_MC_BEGIN(2, 1);
7686 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7687 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7688 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7689
7690 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7691 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7692 IEM_MC_PREPARE_FPU_USAGE();
7693 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7694 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
7695 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7696 IEM_MC_ELSE()
7697 IEM_MC_FPU_STACK_UNDERFLOW(0);
7698 IEM_MC_ENDIF();
7699 IEM_MC_ADVANCE_RIP();
7700
7701 IEM_MC_END();
7702 return VINF_SUCCESS;
7703}
7704
7705
/** Opcode 0xd9 0xe0. FCHS - negates the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1. FABS - clears the sign of ST0 (absolute value). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7720
7721
7722/**
7723 * Common worker for FPU instructions working on ST0 and only returns FSW.
7724 *
7725 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7726 */
7727FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
7728{
7729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7730
7731 IEM_MC_BEGIN(2, 1);
7732 IEM_MC_LOCAL(uint16_t, u16Fsw);
7733 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7734 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7735
7736 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7737 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7738 IEM_MC_PREPARE_FPU_USAGE();
7739 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7740 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
7741 IEM_MC_UPDATE_FSW(u16Fsw);
7742 IEM_MC_ELSE()
7743 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
7744 IEM_MC_ENDIF();
7745 IEM_MC_ADVANCE_RIP();
7746
7747 IEM_MC_END();
7748 return VINF_SUCCESS;
7749}
7750
7751
/** Opcode 0xd9 0xe4. FTST - compares ST0 against 0.0; only FSW is updated. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}


/** Opcode 0xd9 0xe5. FXAM - classifies ST0; only FSW is updated. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
7766
7767
7768/**
7769 * Common worker for FPU instructions pushing a constant onto the FPU stack.
7770 *
7771 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7772 */
7773FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
7774{
7775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7776
7777 IEM_MC_BEGIN(1, 1);
7778 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7779 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7780
7781 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7782 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7783 IEM_MC_PREPARE_FPU_USAGE();
7784 IEM_MC_IF_FPUREG_IS_EMPTY(7)
7785 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
7786 IEM_MC_PUSH_FPU_RESULT(FpuRes);
7787 IEM_MC_ELSE()
7788 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
7789 IEM_MC_ENDIF();
7790 IEM_MC_ADVANCE_RIP();
7791
7792 IEM_MC_END();
7793 return VINF_SUCCESS;
7794}
7795
7796
/** Opcode 0xd9 0xe8. FLD1 - pushes +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9. FLDL2T - pushes log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea. FLDL2E - pushes log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb. FLDPI - pushes pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec. FLDLG2 - pushes log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed. FLDLN2 - pushes loge(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee. FLDZ - pushes +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}


/** Opcode 0xd9 0xf0. F2XM1 - replaces ST0 with 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7857
7858
7859/**
7860 * Common worker for FPU instructions working on STn and ST0, storing the result
7861 * in STn, and popping the stack unless IE, DE or ZE was raised.
7862 *
7863 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7864 */
7865FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7866{
7867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7868
7869 IEM_MC_BEGIN(3, 1);
7870 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7871 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7872 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7873 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7874
7875 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7876 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7877
7878 IEM_MC_PREPARE_FPU_USAGE();
7879 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
7880 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7881 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
7882 IEM_MC_ELSE()
7883 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
7884 IEM_MC_ENDIF();
7885 IEM_MC_ADVANCE_RIP();
7886
7887 IEM_MC_END();
7888 return VINF_SUCCESS;
7889}
7890
7891
/** Opcode 0xd9 0xf1. FYL2X - ST1 = ST1 * log2(ST0), then pop (result ends up
 *  in the new ST0).  Uses the stN_st0_pop worker with STn fixed to 1. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7898
7899
7900/**
7901 * Common worker for FPU instructions working on ST0 and having two outputs, one
7902 * replacing ST0 and one pushed onto the stack.
7903 *
7904 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7905 */
7906FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
7907{
7908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7909
7910 IEM_MC_BEGIN(2, 1);
7911 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
7912 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
7913 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7914
7915 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7916 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7917 IEM_MC_PREPARE_FPU_USAGE();
7918 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7919 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
7920 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
7921 IEM_MC_ELSE()
7922 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
7923 IEM_MC_ENDIF();
7924 IEM_MC_ADVANCE_RIP();
7925
7926 IEM_MC_END();
7927 return VINF_SUCCESS;
7928}
7929
7930
/** Opcode 0xd9 0xf2. FPTAN - partial tangent of ST0; two results (replace
 *  ST0 and push). */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3. FPATAN - arctangent; result in ST1, then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4. FXTRACT - splits ST0 into exponent (replaces ST0) and
 *  significand (pushed). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5. FPREM1 - IEEE partial remainder of ST0/ST1, result in
 *  ST0 (no pop). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
7961
7962
/** Opcode 0xd9 0xf6. FDECSTP - decrements the FPU stack TOP pointer; no
 *  register contents are changed. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xd9 0xf7. FINCSTP - increments the FPU stack TOP pointer; no
 *  register contents are changed. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8009
8010
/** Opcode 0xd9 0xf8. FPREM - partial remainder (truncating) of ST0/ST1,
 *  result in ST0 (no pop). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9. FYL2XP1 - ST1 = ST1 * log2(ST0 + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa. FSQRT - replaces ST0 with its square root. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb. FSINCOS - sine replaces ST0, cosine is pushed. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc. FRNDINT - rounds ST0 to integer per the FCW rounding
 *  mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd. FSCALE - scales ST0 by ST1; result in ST0 (no pop). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe. FSIN - replaces ST0 with its sine. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff. FCOS - replaces ST0 with its cosine. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
8073
8074
8075/** Used by iemOp_EscF1. */
8076IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
8077{
8078 /* 0xe0 */ iemOp_fchs,
8079 /* 0xe1 */ iemOp_fabs,
8080 /* 0xe2 */ iemOp_Invalid,
8081 /* 0xe3 */ iemOp_Invalid,
8082 /* 0xe4 */ iemOp_ftst,
8083 /* 0xe5 */ iemOp_fxam,
8084 /* 0xe6 */ iemOp_Invalid,
8085 /* 0xe7 */ iemOp_Invalid,
8086 /* 0xe8 */ iemOp_fld1,
8087 /* 0xe9 */ iemOp_fldl2t,
8088 /* 0xea */ iemOp_fldl2e,
8089 /* 0xeb */ iemOp_fldpi,
8090 /* 0xec */ iemOp_fldlg2,
8091 /* 0xed */ iemOp_fldln2,
8092 /* 0xee */ iemOp_fldz,
8093 /* 0xef */ iemOp_Invalid,
8094 /* 0xf0 */ iemOp_f2xm1,
8095 /* 0xf1 */ iemOp_fyl2x,
8096 /* 0xf2 */ iemOp_fptan,
8097 /* 0xf3 */ iemOp_fpatan,
8098 /* 0xf4 */ iemOp_fxtract,
8099 /* 0xf5 */ iemOp_fprem1,
8100 /* 0xf6 */ iemOp_fdecstp,
8101 /* 0xf7 */ iemOp_fincstp,
8102 /* 0xf8 */ iemOp_fprem,
8103 /* 0xf9 */ iemOp_fyl2xp1,
8104 /* 0xfa */ iemOp_fsqrt,
8105 /* 0xfb */ iemOp_fsincos,
8106 /* 0xfc */ iemOp_frndint,
8107 /* 0xfd */ iemOp_fscale,
8108 /* 0xfe */ iemOp_fsin,
8109 /* 0xff */ iemOp_fcos
8110};
8111
8112
8113/**
8114 * @opcode 0xd9
8115 */
8116FNIEMOP_DEF(iemOp_EscF1)
8117{
8118 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8119 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
8120
8121 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8122 {
8123 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8124 {
8125 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
8126 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
8127 case 2:
8128 if (bRm == 0xd0)
8129 return FNIEMOP_CALL(iemOp_fnop);
8130 return IEMOP_RAISE_INVALID_OPCODE();
8131 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
8132 case 4:
8133 case 5:
8134 case 6:
8135 case 7:
8136 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
8137 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
8138 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8139 }
8140 }
8141 else
8142 {
8143 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8144 {
8145 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
8146 case 1: return IEMOP_RAISE_INVALID_OPCODE();
8147 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
8148 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
8149 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
8150 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
8151 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
8152 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
8153 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8154 }
8155 }
8156}
8157
8158
/** Opcode 0xda 11/0.
 * FCMOVB ST0,ST(i) - copy ST(i) into ST0 when CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; the move itself is gated on EFLAGS.CF. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();  /* FOP/FIP updated even if the condition is false. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);  /* Either register empty -> stack underflow on ST0. */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8185
8186
/** Opcode 0xda 11/1.
 * FCMOVE ST0,ST(i) - copy ST(i) into ST0 when ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; the move itself is gated on EFLAGS.ZF. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();  /* FOP/FIP updated even if the condition is false. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);  /* Either register empty -> stack underflow on ST0. */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8213
8214
/** Opcode 0xda 11/2.
 * FCMOVBE ST0,ST(i) - copy ST(i) into ST0 when CF or ZF is set (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; the move is gated on CF|ZF (CMOVBE condition). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();  /* FOP/FIP updated even if the condition is false. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);  /* Either register empty -> stack underflow on ST0. */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8241
8242
/** Opcode 0xda 11/3.
 * FCMOVU ST0,ST(i) - copy ST(i) into ST0 when PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; the move is gated on EFLAGS.PF ("unordered"). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();  /* FOP/FIP updated even if the condition is false. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);  /* Either register empty -> stack underflow on ST0. */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8269
8270
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping the stack twice when done (FUCOMPP/FCOMPP style).
 *
 * On success the assembly worker produces an updated FSW which is committed
 * together with the double pop; if either register is empty a stack underflow
 * is raised (and the pops still happen, per IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP).
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly),
 *                 taking (pFSW, pr80Value1, pr80Value2).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operands are ST0 and ST1 (registers 0 and 1). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8302
8303
/** Opcode 0xda 0xe9.
 * FUCOMPP - unordered compare ST0 with ST1, then pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8310
8311
/**
 * Common worker for FPU instructions working on ST0 and an m32i (signed 32-bit
 * integer memory operand), storing the result in ST0.
 *
 * Decodes the effective address, fetches the integer, and invokes the assembly
 * worker when ST0 is non-empty; raises stack underflow otherwise.
 *
 * @param bRm      The ModR/M byte (memory form, mod != 3).
 * @param pfnAImpl Pointer to the instruction implementation (assembly),
 *                 taking (pFpuRes, pr80Value1, pi32Val2).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8347
8348
/** Opcode 0xda !11/0.
 * FIADD m32i - add a signed 32-bit integer from memory to ST0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
8355
8356
/** Opcode 0xda !11/1.
 * FIMUL m32i - multiply ST0 by a signed 32-bit integer from memory. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8363
8364
/** Opcode 0xda !11/2.
 * FICOM m32i - compare ST0 with a signed 32-bit integer from memory,
 * updating only FSW (no register store, no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Memory operand info is recorded for FDP/FDS. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8397
8398
/** Opcode 0xda !11/3.
 * FICOMP m32i - compare ST0 with a signed 32-bit integer from memory,
 * update FSW, then pop ST0. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        /* Same assembly worker as FICOM; the pop is done by the FSW commit below. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8431
8432
/** Opcode 0xda !11/4.
 * FISUB m32i - subtract a signed 32-bit integer from memory from ST0. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
8439
8440
/** Opcode 0xda !11/5.
 * FISUBR m32i - reverse subtract: ST0 = m32i - ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
8447
8448
/** Opcode 0xda !11/6.
 * FIDIV m32i - divide ST0 by a signed 32-bit integer from memory. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
8455
8456
/** Opcode 0xda !11/7.
 * FIDIVR m32i - reverse divide: ST0 = m32i / ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8463
8464
/**
 * @opcode 0xda
 *
 * Escape opcode 0xda dispatcher. Register forms (mod == 3) decode to the
 * FCMOVcc family and FUCOMPP; memory forms decode to the m32i integer
 * arithmetic/compare instructions. The ModR/M byte and the low 3 bits of the
 * opcode are recorded as the FPU opcode (FOP) before dispatching.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only 0xda 0xe9 (FUCOMPP) is valid in the /5 register range. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8506
8507
/** Opcode 0xdb !11/0.
 * FILD m32i - push a signed 32-bit integer from memory onto the FPU stack
 * (converted to 80-bit real). */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST7 (the register below the current top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8539
8540
/** Opcode 0xdb !11/1.
 * FISTTP m32i (SSE3) - store ST0 to memory as a signed 32-bit integer using
 * truncation (chop) rounding regardless of RC, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8575
8576
/** Opcode 0xdb !11/2.
 * FIST m32i - store ST0 to memory as a signed 32-bit integer (rounded per
 * FCW.RC); no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8611
8612
/** Opcode 0xdb !11/3.
 * FISTP m32i - store ST0 to memory as a signed 32-bit integer (rounded per
 * FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Same assembly worker as FIST; the pop is done by the FSW commit below. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8647
8648
/** Opcode 0xdb !11/5.
 * FLD m80r - push an 80-bit real from memory onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST7 (the register below the current top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8680
8681
/** Opcode 0xdb !11/7.
 * FSTP m80r - store ST0 to memory as an 80-bit real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store the real indefinite QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8716
8717
/** Opcode 0xdb 11/0.
 * FCMOVNB ST0,ST(i) - copy ST(i) into ST0 when CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; the move is gated on CF being clear. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();  /* FOP/FIP updated even if the condition is false. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);  /* Either register empty -> stack underflow on ST0. */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8744
8745
/** Opcode 0xdb 11/1.
 * FCMOVNE ST0,ST(i) - copy ST(i) into ST0 when ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; the move is gated on ZF being clear. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();  /* FOP/FIP updated even if the condition is false. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);  /* Either register empty -> stack underflow on ST0. */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8772
8773
/** Opcode 0xdb 11/2.
 * FCMOVNBE ST0,ST(i) - copy ST(i) into ST0 when both CF and ZF are clear
 * (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; the move is gated on CF=0 and ZF=0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();  /* FOP/FIP updated even if the condition is false. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);  /* Either register empty -> stack underflow on ST0. */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8800
8801
/** Opcode 0xdb 11/3.
 * FCMOVNU ST0,ST(i) - copy ST(i) into ST0 when PF is clear (not unordered).
 * NOTE(review): the function/mnemonic identifier carries a double 'n'
 * ("fcmovnnu"); the instruction itself appears to be FCMOVNU - looks like a
 * historical typo kept for stats-token stability, confirm before renaming. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; the move is gated on PF being clear. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();  /* FOP/FIP updated even if the condition is false. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);  /* Either register empty -> stack underflow on ST0. */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8828
8829
/** Opcode 0xdb 0xe0.
 * FNENI - 8087 "enable interrupts"; a no-op (ignored) on later FPUs, only the
 * device-not-available check is performed. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8841
8842
/** Opcode 0xdb 0xe1.
 * FNDISI - 8087 "disable interrupts"; a no-op (ignored) on later FPUs, only
 * the device-not-available check is performed. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8854
8855
/** Opcode 0xdb 0xe2.
 * FNCLEX - clear the FPU exception flags in FSW without checking for pending
 * exceptions first (no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();  /* FPU state is modified, not just read. */
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8870
8871
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitialize the FPU without checking for pending exceptions
 * (no-wait form); deferred to the C implementation. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8879
8880
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287 "set protected mode on FPU"; a no-op (ignored) on later
 * FPUs, only the device-not-available check is performed. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8892
8893
/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL "reset protected mode" (back to real mode). Newer CPUs
 * raise \#UD for this encoding, which is the active behavior here; the
 * ignore-it variant is kept under \#if 0 for reference. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8909
8910
/** Opcode 0xdb 11/5.
 * FUCOMI ST0,ST(i) - unordered compare setting EFLAGS; no pop.
 * Deferred to the common C implementation shared with FCOMI/FCOMIP/FUCOMIP. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
8917
8918
/** Opcode 0xdb 11/6.
 * FCOMI ST0,ST(i) - ordered compare setting EFLAGS; no pop.
 * Deferred to the common C implementation shared with FUCOMI/FCOMIP/FUCOMIP. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8925
8926
/**
 * @opcode 0xdb
 *
 * Escape opcode 0xdb dispatcher. Register forms (mod == 3) decode to the
 * FCMOVNcc family, the control no-wait instructions (FNENI/FNDISI/FNCLEX/
 * FNINIT/FNSETPM/FRSTPM) and FUCOMI/FCOMI; memory forms decode to the m32i
 * integer loads/stores and the 80-bit real load/store. The ModR/M byte and
 * the low 3 bits of the opcode are recorded as the FPU opcode (FOP) before
 * dispatching.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* The /4 register range is a set of individually encoded control ops. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8978
8979
/**
 * Common worker for FPU instructions working on ST(i) and ST0, storing the
 * result in ST(i) unless IE, DE or ZE was raised (per the store/underflow
 * handling of IEM_MC_STORE_FPU_RESULT).
 *
 * @param bRm      The ModR/M byte (register form; low 3 bits select ST(i)).
 * @param pfnAImpl Pointer to the instruction implementation (assembly),
 *                 taking (pFpuRes, pr80Value1, pr80Value2).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand 1 is ST(i), operand 2 is ST0; result goes back into ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9011
9012
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST0 - ST(i) = ST(i) + ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
9019
9020
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST0 - ST(i) = ST(i) * ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
9027
9028
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST0 - ST(i) = ST0 - ST(i).
 * (The 0xdc register forms swap the sub/subr encodings relative to 0xd8.) */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
9035
9036
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST0 - ST(i) = ST(i) - ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
9043
9044
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST0 - ST(i) = ST0 / ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
9051
9052
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST0 - ST(i) = ST(i) / ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
9059
9060
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating
 * point memory operand, storing the result in ST0.
 *
 * @param bRm     The ModR/M byte (memory form, mod != 3).
 * @param pfnImpl Pointer to the instruction implementation (assembly),
 *                taking (pFpuRes, pr80Factor1, pr64Factor2).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        /* Memory operand info is recorded for FDP/FDS. */
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9095
9096
/** Opcode 0xdc !11/0.
 * FADD m64r - add a 64-bit real from memory to ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
9103
9104
/** Opcode 0xdc !11/1.
 * FMUL m64r - multiply ST0 by a 64-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
9111
9112
/** Opcode 0xdc !11/2.
 * FCOM m64r - compare ST0 with a 64-bit real from memory, updating only FSW
 * (no register store, no pop). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        /* Memory operand info is recorded for FDP/FDS. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9145
9146
/** Opcode 0xdc !11/3.
 * FCOMP m64r - compare ST0 with a 64-bit real from memory, update FSW, then
 * pop ST0. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        /* Same assembly worker as FCOM; the pop is done by the FSW commit below. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9179
9180
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    /* ST(0) = ST(0) - [m64real]. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
9187
9188
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    /* Reversed subtract: ST(0) = [m64real] - ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
9195
9196
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    /* ST(0) = ST(0) / [m64real]. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
9203
9204
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    /* Reversed divide: ST(0) = [m64real] / ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
9211
9212
/**
 * @opcode 0xdc
 *
 * Escape byte 0xdc dispatcher: register forms (mod=3) operate on ST(i) with
 * ST(0) as the other operand; memory forms operate on ST(0) and an m64real.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of the escape byte + modrm) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            /* Note: for 0xdc the sub/div register forms are the reversed ones
               relative to the 0xd8 encodings (fsubr/fsub, fdivr/fdiv swapped). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9251
9252
/** Opcode 0xdd !11/0.
 * FLD m64real - converts a 64-bit real from memory to 80-bit and pushes it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) - the register that becomes the new top - to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9284
9285
/** Opcode 0xdd !11/1.
 * FISTTP m64int (SSE3) - stores ST(0) to memory as a 64-bit integer using
 * truncation (chop) regardless of RC, then pops. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; committed only on a maskable result. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9320
9321
/** Opcode 0xdd !11/2.
 * FST m64real - stores ST(0) to memory as a 64-bit real; does not pop. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9356
9357
9358
9359
/** Opcode 0xdd !11/3.
 * FSTP m64real - stores ST(0) to memory as a 64-bit real, then pops. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Same store worker as FST; the _THEN_POP FSW update does the pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9394
9395
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - restores the full FPU state from memory; the layout
 * depends on the effective operand size, hence enmEffOpSize is passed on. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* Heavy lifting is done in the C implementation. */
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9413
9414
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - saves the full FPU state to memory (no wait prefix
 * handling here; FSAVE is FNSAVE preceded by 9Bh). */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* Heavy lifting (including the FNSAVE-implied reinit) is done in C. */
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
9433
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - stores the FPU status word to a 16-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 *        from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 *        NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
9458
9459
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - marks the given register as empty in the tag word; does not
 * change the stack top. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9481
9482
/** Opcode 0xdd 11/2.
 * FST ST(i) - copies ST(0) into ST(i); does not pop. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST(0) value in a result with a zero FSW delta and store it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9507
9508
/** Opcode 0xdd 11/4.
 * FUCOM ST(0),ST(i) - unordered compare; no store, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
9515
9516
/** Opcode 0xdd 11/5.
 * FUCOMP ST(0),ST(i) - unordered compare, then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9523
9524
/**
 * @opcode 0xdd
 *
 * Escape byte 0xdd dispatcher: register forms (mod=3) are FFREE/FST/FSTP/
 * FUCOM(P); memory forms are 64-bit real loads/stores, FISTTP, FRSTOR,
 * FNSAVE and FNSTSW m16.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of the escape byte + modrm) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9563
9564
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - ST(i) += ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
9571
9572
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - ST(i) *= ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
9579
9580
/** Opcode 0xde 0xd9.
 * FCOMPP - compares ST(0) with ST(1), then pops both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
9587
9588
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - ST(i) = ST(0) - ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
9595
9596
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - ST(i) = ST(i) - ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
9603
9604
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - ST(i) = ST(0) / ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
9611
9612
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - ST(i) = ST(i) / ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9619
9620
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * Decodes the effective address, fetches the 16-bit signed integer operand,
 * performs the usual CR0.TS/EM and pending-exception checks, and invokes the
 * assembly worker with (result, ST0, m16i).  An empty ST(0) yields a stack
 * underflow instead.
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9656
9657
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    /* ST(0) = ST(0) + [m16int]. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
9664
9665
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    /* ST(0) = ST(0) * [m16int]. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9672
9673
/** Opcode 0xde !11/2.
 * FICOM ST(0),m16int - compares ST(0) with a 16-bit signed integer from
 * memory; does not pop. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9706
9707
/** Opcode 0xde !11/3.
 * FICOMP ST(0),m16int - same as FICOM m16int but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        /* Same comparison worker as FICOM; the _THEN_POP variants do the pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9740
9741
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    /* ST(0) = ST(0) - [m16int]. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
9748
9749
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    /* Reversed subtract: ST(0) = [m16int] - ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
9756
9757
/** Opcode 0xde !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    /* ST(0) = ST(0) / [m16int]. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
9764
9765
/** Opcode 0xde !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    /* Reversed divide: ST(0) = [m16int] / ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9772
9773
/**
 * @opcode 0xde
 *
 * Escape byte 0xde dispatcher: register forms (mod=3) are the pop-variants of
 * the arithmetic ops on ST(i),ST(0) plus FCOMPP; memory forms operate on
 * ST(0) and an m16int.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of the escape byte + modrm) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            /* /3 is only valid for the 0xd9 modrm byte (FCOMPP). */
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9814
9815
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Free ST(i), then increment the top-of-stack ("pop" without a store). */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9837
9838
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - stores the FPU status word into the AX register. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9855
9856
/** Opcode 0xdf 11/5.
 * FUCOMIP ST(0),ST(i) - unordered compare into EFLAGS, then pop.
 * NOTE(review): this uses the same iemAImpl_fcomi_r80_by_r80 worker as
 * FCOMIP; FUCOMI differs from FCOMI only in the #IA behavior for QNaN
 * operands - confirm the shared worker is intentional. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9863
9864
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i) - compare into EFLAGS (ZF/PF/CF), then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9871
9872
/** Opcode 0xdf !11/0.
 * FILD m16int - converts a 16-bit signed integer from memory to 80-bit real
 * and pushes it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int16_t,               i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val,  i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) - the register that becomes the new top - to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9904
9905
/** Opcode 0xdf !11/1.
 * FISTTP m16int (SSE3) - stores ST(0) to memory as a 16-bit integer using
 * truncation (chop) regardless of RC, then pops. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9940
9941
/** Opcode 0xdf !11/2.
 * FIST m16int - stores ST(0) to memory as a 16-bit integer (rounding per RC);
 * does not pop. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9976
9977
/** Opcode 0xdf !11/3.
 * FISTP m16int - stores ST(0) to memory as a 16-bit integer, then pops. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Same store worker as FIST; the _THEN_POP FSW update does the pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10012
10013
/** Opcode 0xdf !11/4.
 * FBLD m80 - load packed BCD; not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
10016
10017
/** Opcode 0xdf !11/5.
 * FILD m64int - converts a 64-bit signed integer from memory to 80-bit real
 * and pushes it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int64_t,               i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val,  i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) - the register that becomes the new top - to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10049
10050
/** Opcode 0xdf !11/6.
 * FBSTP m80 - store packed BCD and pop; not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
10053
10054
/** Opcode 0xdf !11/7.
 * FISTP m64int - stores ST(0) to memory as a 64-bit integer (rounding per
 * RC), then pops. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10089
10090
10091/**
10092 * @opcode 0xdf
10093 */
10094FNIEMOP_DEF(iemOp_EscF7)
10095{
10096 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10097 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10098 {
10099 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10100 {
10101 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
10102 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
10103 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10104 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10105 case 4: if (bRm == 0xe0)
10106 return FNIEMOP_CALL(iemOp_fnstsw_ax);
10107 return IEMOP_RAISE_INVALID_OPCODE();
10108 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
10109 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
10110 case 7: return IEMOP_RAISE_INVALID_OPCODE();
10111 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10112 }
10113 }
10114 else
10115 {
10116 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10117 {
10118 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
10119 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
10120 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
10121 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
10122 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
10123 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
10124 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
10125 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
10126 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10127 }
10128 }
10129}
10130
10131
10132/**
10133 * @opcode 0xe0
10134 */
10135FNIEMOP_DEF(iemOp_loopne_Jb)
10136{
10137 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
10138 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10140 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10141
10142 switch (pVCpu->iem.s.enmEffAddrMode)
10143 {
10144 case IEMMODE_16BIT:
10145 IEM_MC_BEGIN(0,0);
10146 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10147 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10148 IEM_MC_REL_JMP_S8(i8Imm);
10149 } IEM_MC_ELSE() {
10150 IEM_MC_ADVANCE_RIP();
10151 } IEM_MC_ENDIF();
10152 IEM_MC_END();
10153 return VINF_SUCCESS;
10154
10155 case IEMMODE_32BIT:
10156 IEM_MC_BEGIN(0,0);
10157 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10158 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10159 IEM_MC_REL_JMP_S8(i8Imm);
10160 } IEM_MC_ELSE() {
10161 IEM_MC_ADVANCE_RIP();
10162 } IEM_MC_ENDIF();
10163 IEM_MC_END();
10164 return VINF_SUCCESS;
10165
10166 case IEMMODE_64BIT:
10167 IEM_MC_BEGIN(0,0);
10168 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10169 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10170 IEM_MC_REL_JMP_S8(i8Imm);
10171 } IEM_MC_ELSE() {
10172 IEM_MC_ADVANCE_RIP();
10173 } IEM_MC_ENDIF();
10174 IEM_MC_END();
10175 return VINF_SUCCESS;
10176
10177 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10178 }
10179}
10180
10181
10182/**
10183 * @opcode 0xe1
10184 */
10185FNIEMOP_DEF(iemOp_loope_Jb)
10186{
10187 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
10188 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10190 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10191
10192 switch (pVCpu->iem.s.enmEffAddrMode)
10193 {
10194 case IEMMODE_16BIT:
10195 IEM_MC_BEGIN(0,0);
10196 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10197 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10198 IEM_MC_REL_JMP_S8(i8Imm);
10199 } IEM_MC_ELSE() {
10200 IEM_MC_ADVANCE_RIP();
10201 } IEM_MC_ENDIF();
10202 IEM_MC_END();
10203 return VINF_SUCCESS;
10204
10205 case IEMMODE_32BIT:
10206 IEM_MC_BEGIN(0,0);
10207 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10208 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10209 IEM_MC_REL_JMP_S8(i8Imm);
10210 } IEM_MC_ELSE() {
10211 IEM_MC_ADVANCE_RIP();
10212 } IEM_MC_ENDIF();
10213 IEM_MC_END();
10214 return VINF_SUCCESS;
10215
10216 case IEMMODE_64BIT:
10217 IEM_MC_BEGIN(0,0);
10218 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10219 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10220 IEM_MC_REL_JMP_S8(i8Imm);
10221 } IEM_MC_ELSE() {
10222 IEM_MC_ADVANCE_RIP();
10223 } IEM_MC_ENDIF();
10224 IEM_MC_END();
10225 return VINF_SUCCESS;
10226
10227 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10228 }
10229}
10230
10231
10232/**
10233 * @opcode 0xe2
10234 */
10235FNIEMOP_DEF(iemOp_loop_Jb)
10236{
10237 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
10238 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10240 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10241
10242 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
10243 * using the 32-bit operand size override. How can that be restarted? See
10244 * weird pseudo code in intel manual. */
10245 switch (pVCpu->iem.s.enmEffAddrMode)
10246 {
10247 case IEMMODE_16BIT:
10248 IEM_MC_BEGIN(0,0);
10249 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
10250 {
10251 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10252 IEM_MC_IF_CX_IS_NZ() {
10253 IEM_MC_REL_JMP_S8(i8Imm);
10254 } IEM_MC_ELSE() {
10255 IEM_MC_ADVANCE_RIP();
10256 } IEM_MC_ENDIF();
10257 }
10258 else
10259 {
10260 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
10261 IEM_MC_ADVANCE_RIP();
10262 }
10263 IEM_MC_END();
10264 return VINF_SUCCESS;
10265
10266 case IEMMODE_32BIT:
10267 IEM_MC_BEGIN(0,0);
10268 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
10269 {
10270 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10271 IEM_MC_IF_ECX_IS_NZ() {
10272 IEM_MC_REL_JMP_S8(i8Imm);
10273 } IEM_MC_ELSE() {
10274 IEM_MC_ADVANCE_RIP();
10275 } IEM_MC_ENDIF();
10276 }
10277 else
10278 {
10279 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
10280 IEM_MC_ADVANCE_RIP();
10281 }
10282 IEM_MC_END();
10283 return VINF_SUCCESS;
10284
10285 case IEMMODE_64BIT:
10286 IEM_MC_BEGIN(0,0);
10287 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
10288 {
10289 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10290 IEM_MC_IF_RCX_IS_NZ() {
10291 IEM_MC_REL_JMP_S8(i8Imm);
10292 } IEM_MC_ELSE() {
10293 IEM_MC_ADVANCE_RIP();
10294 } IEM_MC_ENDIF();
10295 }
10296 else
10297 {
10298 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
10299 IEM_MC_ADVANCE_RIP();
10300 }
10301 IEM_MC_END();
10302 return VINF_SUCCESS;
10303
10304 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10305 }
10306}
10307
10308
10309/**
10310 * @opcode 0xe3
10311 */
10312FNIEMOP_DEF(iemOp_jecxz_Jb)
10313{
10314 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
10315 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10317 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10318
10319 switch (pVCpu->iem.s.enmEffAddrMode)
10320 {
10321 case IEMMODE_16BIT:
10322 IEM_MC_BEGIN(0,0);
10323 IEM_MC_IF_CX_IS_NZ() {
10324 IEM_MC_ADVANCE_RIP();
10325 } IEM_MC_ELSE() {
10326 IEM_MC_REL_JMP_S8(i8Imm);
10327 } IEM_MC_ENDIF();
10328 IEM_MC_END();
10329 return VINF_SUCCESS;
10330
10331 case IEMMODE_32BIT:
10332 IEM_MC_BEGIN(0,0);
10333 IEM_MC_IF_ECX_IS_NZ() {
10334 IEM_MC_ADVANCE_RIP();
10335 } IEM_MC_ELSE() {
10336 IEM_MC_REL_JMP_S8(i8Imm);
10337 } IEM_MC_ENDIF();
10338 IEM_MC_END();
10339 return VINF_SUCCESS;
10340
10341 case IEMMODE_64BIT:
10342 IEM_MC_BEGIN(0,0);
10343 IEM_MC_IF_RCX_IS_NZ() {
10344 IEM_MC_ADVANCE_RIP();
10345 } IEM_MC_ELSE() {
10346 IEM_MC_REL_JMP_S8(i8Imm);
10347 } IEM_MC_ENDIF();
10348 IEM_MC_END();
10349 return VINF_SUCCESS;
10350
10351 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10352 }
10353}
10354
10355
10356/** Opcode 0xe4 */
10357FNIEMOP_DEF(iemOp_in_AL_Ib)
10358{
10359 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
10360 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10362 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
10363}
10364
10365
10366/** Opcode 0xe5 */
10367FNIEMOP_DEF(iemOp_in_eAX_Ib)
10368{
10369 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
10370 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10372 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
10373}
10374
10375
10376/** Opcode 0xe6 */
10377FNIEMOP_DEF(iemOp_out_Ib_AL)
10378{
10379 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
10380 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10382 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
10383}
10384
10385
10386/** Opcode 0xe7 */
10387FNIEMOP_DEF(iemOp_out_Ib_eAX)
10388{
10389 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
10390 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10392 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
10393}
10394
10395
10396/**
10397 * @opcode 0xe8
10398 */
10399FNIEMOP_DEF(iemOp_call_Jv)
10400{
10401 IEMOP_MNEMONIC(call_Jv, "call Jv");
10402 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10403 switch (pVCpu->iem.s.enmEffOpSize)
10404 {
10405 case IEMMODE_16BIT:
10406 {
10407 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10408 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
10409 }
10410
10411 case IEMMODE_32BIT:
10412 {
10413 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10414 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
10415 }
10416
10417 case IEMMODE_64BIT:
10418 {
10419 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10420 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
10421 }
10422
10423 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10424 }
10425}
10426
10427
10428/**
10429 * @opcode 0xe9
10430 */
10431FNIEMOP_DEF(iemOp_jmp_Jv)
10432{
10433 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
10434 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10435 switch (pVCpu->iem.s.enmEffOpSize)
10436 {
10437 case IEMMODE_16BIT:
10438 {
10439 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
10440 IEM_MC_BEGIN(0, 0);
10441 IEM_MC_REL_JMP_S16(i16Imm);
10442 IEM_MC_END();
10443 return VINF_SUCCESS;
10444 }
10445
10446 case IEMMODE_64BIT:
10447 case IEMMODE_32BIT:
10448 {
10449 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
10450 IEM_MC_BEGIN(0, 0);
10451 IEM_MC_REL_JMP_S32(i32Imm);
10452 IEM_MC_END();
10453 return VINF_SUCCESS;
10454 }
10455
10456 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10457 }
10458}
10459
10460
10461/**
10462 * @opcode 0xea
10463 */
10464FNIEMOP_DEF(iemOp_jmp_Ap)
10465{
10466 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
10467 IEMOP_HLP_NO_64BIT();
10468
10469 /* Decode the far pointer address and pass it on to the far call C implementation. */
10470 uint32_t offSeg;
10471 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
10472 IEM_OPCODE_GET_NEXT_U32(&offSeg);
10473 else
10474 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
10475 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
10476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10477 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
10478}
10479
10480
10481/**
10482 * @opcode 0xeb
10483 */
10484FNIEMOP_DEF(iemOp_jmp_Jb)
10485{
10486 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
10487 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10489 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10490
10491 IEM_MC_BEGIN(0, 0);
10492 IEM_MC_REL_JMP_S8(i8Imm);
10493 IEM_MC_END();
10494 return VINF_SUCCESS;
10495}
10496
10497
/** Opcode 0xec.
 * IN AL,DX - read one byte from the I/O port in DX into AL; deferred to the
 * common eAX/DX C implementation with a byte access size. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
10505
10506
/** Opcode 0xed.
 * IN eAX,DX - read a word or dword (per effective operand size) from the I/O
 * port in DX into AX/EAX.
 * @note NOTE(review): the function name is missing the 'in_' part
 *       (iemOp_in_eAX_DX would match the mnemonic and its siblings), but
 *       renaming would require touching the opcode table that references it. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10514
10515
/** Opcode 0xee.
 * OUT DX,AL - write AL to the I/O port in DX; deferred to the common
 * DX/eAX C implementation with a byte access size. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
10523
10524
/** Opcode 0xef.
 * OUT DX,eAX - write AX/EAX (per effective operand size) to the I/O port
 * in DX; deferred to the common DX/eAX C implementation. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10532
10533
10534/**
10535 * @opcode 0xf0
10536 */
10537FNIEMOP_DEF(iemOp_lock)
10538{
10539 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
10540 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
10541
10542 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10543 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10544}
10545
10546
10547/**
10548 * @opcode 0xf1
10549 */
10550FNIEMOP_DEF(iemOp_int1)
10551{
10552 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
10553 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
10554 /** @todo testcase! */
10555 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
10556}
10557
10558
10559/**
10560 * @opcode 0xf2
10561 */
10562FNIEMOP_DEF(iemOp_repne)
10563{
10564 /* This overrides any previous REPE prefix. */
10565 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
10566 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
10567 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
10568
10569 /* For the 4 entry opcode tables, REPNZ overrides any previous
10570 REPZ and operand size prefixes. */
10571 pVCpu->iem.s.idxPrefix = 3;
10572
10573 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10574 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10575}
10576
10577
10578/**
10579 * @opcode 0xf3
10580 */
10581FNIEMOP_DEF(iemOp_repe)
10582{
10583 /* This overrides any previous REPNE prefix. */
10584 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
10585 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
10586 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
10587
10588 /* For the 4 entry opcode tables, REPNZ overrides any previous
10589 REPNZ and operand size prefixes. */
10590 pVCpu->iem.s.idxPrefix = 2;
10591
10592 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10593 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10594}
10595
10596
10597/**
10598 * @opcode 0xf4
10599 */
10600FNIEMOP_DEF(iemOp_hlt)
10601{
10602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10603 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
10604}
10605
10606
10607/**
10608 * @opcode 0xf5
10609 */
10610FNIEMOP_DEF(iemOp_cmc)
10611{
10612 IEMOP_MNEMONIC(cmc, "cmc");
10613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10614 IEM_MC_BEGIN(0, 0);
10615 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
10616 IEM_MC_ADVANCE_RIP();
10617 IEM_MC_END();
10618 return VINF_SUCCESS;
10619}
10620
10621
10622/**
10623 * Common implementation of 'inc/dec/not/neg Eb'.
10624 *
10625 * @param bRm The RM byte.
10626 * @param pImpl The instruction implementation.
10627 */
10628FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10629{
10630 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10631 {
10632 /* register access */
10633 IEM_MC_BEGIN(2, 0);
10634 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10635 IEM_MC_ARG(uint32_t *, pEFlags, 1);
10636 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10637 IEM_MC_REF_EFLAGS(pEFlags);
10638 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10639 IEM_MC_ADVANCE_RIP();
10640 IEM_MC_END();
10641 }
10642 else
10643 {
10644 /* memory access. */
10645 IEM_MC_BEGIN(2, 2);
10646 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10647 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10648 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10649
10650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10651 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10652 IEM_MC_FETCH_EFLAGS(EFlags);
10653 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10654 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10655 else
10656 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
10657
10658 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10659 IEM_MC_COMMIT_EFLAGS(EFlags);
10660 IEM_MC_ADVANCE_RIP();
10661 IEM_MC_END();
10662 }
10663 return VINF_SUCCESS;
10664}
10665
10666
10667/**
10668 * Common implementation of 'inc/dec/not/neg Ev'.
10669 *
10670 * @param bRm The RM byte.
10671 * @param pImpl The instruction implementation.
10672 */
10673FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10674{
10675 /* Registers are handled by a common worker. */
10676 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10677 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10678
10679 /* Memory we do here. */
10680 switch (pVCpu->iem.s.enmEffOpSize)
10681 {
10682 case IEMMODE_16BIT:
10683 IEM_MC_BEGIN(2, 2);
10684 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10685 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10686 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10687
10688 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10689 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10690 IEM_MC_FETCH_EFLAGS(EFlags);
10691 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10692 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
10693 else
10694 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
10695
10696 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10697 IEM_MC_COMMIT_EFLAGS(EFlags);
10698 IEM_MC_ADVANCE_RIP();
10699 IEM_MC_END();
10700 return VINF_SUCCESS;
10701
10702 case IEMMODE_32BIT:
10703 IEM_MC_BEGIN(2, 2);
10704 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10705 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10707
10708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10709 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10710 IEM_MC_FETCH_EFLAGS(EFlags);
10711 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10712 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
10713 else
10714 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
10715
10716 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10717 IEM_MC_COMMIT_EFLAGS(EFlags);
10718 IEM_MC_ADVANCE_RIP();
10719 IEM_MC_END();
10720 return VINF_SUCCESS;
10721
10722 case IEMMODE_64BIT:
10723 IEM_MC_BEGIN(2, 2);
10724 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10725 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10727
10728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10729 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10730 IEM_MC_FETCH_EFLAGS(EFlags);
10731 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10732 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
10733 else
10734 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
10735
10736 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10737 IEM_MC_COMMIT_EFLAGS(EFlags);
10738 IEM_MC_ADVANCE_RIP();
10739 IEM_MC_END();
10740 return VINF_SUCCESS;
10741
10742 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10743 }
10744}
10745
10746
/** Opcode 0xf6 /0.
 * TEST Eb,Ib - AND the operand with the immediate and set flags; the operand
 * is never written back (note the read-only IEM_ACCESS_DATA_R mapping). */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The effective address is calculated before the immediate byte is
           fetched (the '1' tells the calculation one opcode byte follows). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10793
10794
/** Opcode 0xf7 /0.
 * TEST Ev,Iv - AND the operand with the immediate and set flags; the operand
 * is never written back (read-only IEM_ACCESS_DATA_R mapping, and no
 * high-dword clearing for the 32-bit register form). */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* 32-bit immediate, sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first; the trailing argument is the
                   number of immediate bytes still to be fetched. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Still 4 immediate bytes in 64-bit mode (imm32 sign-extended). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10934
10935
/** Opcode 0xf6 /4, /5, /6 and /7.
 * Common worker for MUL/IMUL/DIV/IDIV Eb.  The operation acts on AX and the
 * byte operand via @a pfnU8; a non-zero return from the assembly helper
 * indicates a divide error and a \#DE is raised.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The 8-bit multiply/divide assembly helper to invoke.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 means the helper detected a divide error. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10987
10988
10989/** Opcode 0xf7 /4, /5, /6 and /7. */
10990FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
10991{
10992 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10993
10994 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10995 {
10996 /* register access */
10997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10998 switch (pVCpu->iem.s.enmEffOpSize)
10999 {
11000 case IEMMODE_16BIT:
11001 {
11002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11003 IEM_MC_BEGIN(4, 1);
11004 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11005 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11006 IEM_MC_ARG(uint16_t, u16Value, 2);
11007 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11008 IEM_MC_LOCAL(int32_t, rc);
11009
11010 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11011 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11012 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11013 IEM_MC_REF_EFLAGS(pEFlags);
11014 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11015 IEM_MC_IF_LOCAL_IS_Z(rc) {
11016 IEM_MC_ADVANCE_RIP();
11017 } IEM_MC_ELSE() {
11018 IEM_MC_RAISE_DIVIDE_ERROR();
11019 } IEM_MC_ENDIF();
11020
11021 IEM_MC_END();
11022 return VINF_SUCCESS;
11023 }
11024
11025 case IEMMODE_32BIT:
11026 {
11027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11028 IEM_MC_BEGIN(4, 1);
11029 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11030 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11031 IEM_MC_ARG(uint32_t, u32Value, 2);
11032 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11033 IEM_MC_LOCAL(int32_t, rc);
11034
11035 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11036 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11037 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11038 IEM_MC_REF_EFLAGS(pEFlags);
11039 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11040 IEM_MC_IF_LOCAL_IS_Z(rc) {
11041 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11042 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11043 IEM_MC_ADVANCE_RIP();
11044 } IEM_MC_ELSE() {
11045 IEM_MC_RAISE_DIVIDE_ERROR();
11046 } IEM_MC_ENDIF();
11047
11048 IEM_MC_END();
11049 return VINF_SUCCESS;
11050 }
11051
11052 case IEMMODE_64BIT:
11053 {
11054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11055 IEM_MC_BEGIN(4, 1);
11056 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11057 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11058 IEM_MC_ARG(uint64_t, u64Value, 2);
11059 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11060 IEM_MC_LOCAL(int32_t, rc);
11061
11062 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11063 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11064 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11065 IEM_MC_REF_EFLAGS(pEFlags);
11066 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11067 IEM_MC_IF_LOCAL_IS_Z(rc) {
11068 IEM_MC_ADVANCE_RIP();
11069 } IEM_MC_ELSE() {
11070 IEM_MC_RAISE_DIVIDE_ERROR();
11071 } IEM_MC_ENDIF();
11072
11073 IEM_MC_END();
11074 return VINF_SUCCESS;
11075 }
11076
11077 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11078 }
11079 }
11080 else
11081 {
11082 /* memory access. */
11083 switch (pVCpu->iem.s.enmEffOpSize)
11084 {
11085 case IEMMODE_16BIT:
11086 {
11087 IEM_MC_BEGIN(4, 2);
11088 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11089 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11090 IEM_MC_ARG(uint16_t, u16Value, 2);
11091 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11092 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11093 IEM_MC_LOCAL(int32_t, rc);
11094
11095 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11097 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11098 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11099 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11100 IEM_MC_REF_EFLAGS(pEFlags);
11101 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11102 IEM_MC_IF_LOCAL_IS_Z(rc) {
11103 IEM_MC_ADVANCE_RIP();
11104 } IEM_MC_ELSE() {
11105 IEM_MC_RAISE_DIVIDE_ERROR();
11106 } IEM_MC_ENDIF();
11107
11108 IEM_MC_END();
11109 return VINF_SUCCESS;
11110 }
11111
11112 case IEMMODE_32BIT:
11113 {
11114 IEM_MC_BEGIN(4, 2);
11115 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11116 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11117 IEM_MC_ARG(uint32_t, u32Value, 2);
11118 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11120 IEM_MC_LOCAL(int32_t, rc);
11121
11122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11124 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11125 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11126 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11127 IEM_MC_REF_EFLAGS(pEFlags);
11128 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11129 IEM_MC_IF_LOCAL_IS_Z(rc) {
11130 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11131 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11132 IEM_MC_ADVANCE_RIP();
11133 } IEM_MC_ELSE() {
11134 IEM_MC_RAISE_DIVIDE_ERROR();
11135 } IEM_MC_ENDIF();
11136
11137 IEM_MC_END();
11138 return VINF_SUCCESS;
11139 }
11140
11141 case IEMMODE_64BIT:
11142 {
11143 IEM_MC_BEGIN(4, 2);
11144 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11145 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11146 IEM_MC_ARG(uint64_t, u64Value, 2);
11147 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11149 IEM_MC_LOCAL(int32_t, rc);
11150
11151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11153 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11154 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11155 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11156 IEM_MC_REF_EFLAGS(pEFlags);
11157 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11158 IEM_MC_IF_LOCAL_IS_Z(rc) {
11159 IEM_MC_ADVANCE_RIP();
11160 } IEM_MC_ELSE() {
11161 IEM_MC_RAISE_DIVIDE_ERROR();
11162 } IEM_MC_ENDIF();
11163
11164 IEM_MC_END();
11165 return VINF_SUCCESS;
11166 }
11167
11168 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11169 }
11170 }
11171}
11172
/**
 * @opcode 0xf6
 *
 * Group 3 with a byte operand (Eb): dispatches on the reg field of the
 * ModR/M byte to test / not / neg / mul / imul / div / idiv.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            /* /1 is an undocumented encoding (see todo above); IEM raises #UD for it. */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11211
11212
/**
 * @opcode 0xf7
 *
 * Group 3 with a word/dword/qword operand (Ev): dispatches on the reg field
 * of the ModR/M byte to test / not / neg / mul / imul / div / idiv.  Unlike
 * the Eb form, the mul/div workers take a multi-size implementation table.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            /* /1 is an undocumented encoding (see todo above); IEM raises #UD for it. */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11251
11252
/**
 * @opcode 0xf8
 *
 * clc - clear the carry flag.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* no lock prefix allowed */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11266
11267
/**
 * @opcode 0xf9
 *
 * stc - set the carry flag.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* no lock prefix allowed */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11281
11282
/**
 * @opcode 0xfa
 *
 * cli - deferred to a C implementation (privilege/IOPL checks live there).
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* no lock prefix allowed */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
11292
11293
/**
 * @opcode 0xfb
 *
 * sti - deferred to a C implementation (privilege/IOPL and interrupt
 * shadow handling live there).
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* no lock prefix allowed */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11300
11301
/**
 * @opcode 0xfc
 *
 * cld - clear the direction flag.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* no lock prefix allowed */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11315
11316
/**
 * @opcode 0xfd
 *
 * std - set the direction flag.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* no lock prefix allowed */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11330
11331
/**
 * @opcode 0xfe
 *
 * Group 4: only /0 (inc Eb) and /1 (dec Eb) are valid; /2../7 raise #UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
11351
11352
/**
 * Opcode 0xff /2 - near indirect call.
 *
 * Fetches the new RIP either from a general register (mod == 3) or from
 * memory, then defers the actual call (stack push + RIP update) to the
 * size-specific iemCImpl_call_* worker.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11437
11438typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11439
11440FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11441{
11442 /* Registers? How?? */
11443 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
11444 { /* likely */ }
11445 else
11446 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11447
11448 /* Far pointer loaded from memory. */
11449 switch (pVCpu->iem.s.enmEffOpSize)
11450 {
11451 case IEMMODE_16BIT:
11452 IEM_MC_BEGIN(3, 1);
11453 IEM_MC_ARG(uint16_t, u16Sel, 0);
11454 IEM_MC_ARG(uint16_t, offSeg, 1);
11455 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11456 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11459 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11460 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11461 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11462 IEM_MC_END();
11463 return VINF_SUCCESS;
11464
11465 case IEMMODE_64BIT:
11466 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
11467 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11468 * and call far qword [rsp] encodings. */
11469 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11470 {
11471 IEM_MC_BEGIN(3, 1);
11472 IEM_MC_ARG(uint16_t, u16Sel, 0);
11473 IEM_MC_ARG(uint64_t, offSeg, 1);
11474 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11478 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11479 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11480 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11481 IEM_MC_END();
11482 return VINF_SUCCESS;
11483 }
11484 /* AMD falls thru. */
11485 /* fall thru */
11486
11487 case IEMMODE_32BIT:
11488 IEM_MC_BEGIN(3, 1);
11489 IEM_MC_ARG(uint16_t, u16Sel, 0);
11490 IEM_MC_ARG(uint32_t, offSeg, 1);
11491 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11493 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11495 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11496 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11497 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11498 IEM_MC_END();
11499 return VINF_SUCCESS;
11500
11501 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11502 }
11503}
11504
11505
/**
 * Opcode 0xff /3 - far indirect call through a memory far pointer.
 *
 * Thin wrapper: delegates to the common far-branch worker with the
 * far-call C implementation.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
11515
11516
/**
 * Opcode 0xff /4 - near indirect jump.
 *
 * Fetches the new RIP either from a general register (mod == 3) or from
 * memory and sets it directly; no stack activity, unlike /2 (calln).
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11601
11602
/**
 * Opcode 0xff /5 - far indirect jump through a memory far pointer.
 *
 * Thin wrapper: delegates to the common far-branch worker with the
 * far-jump C implementation.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
11612
11613
/**
 * Opcode 0xff /6 - push Ev.
 *
 * Register operands go through the common push-register worker; memory
 * operands are fetched here and pushed via the size-specific push macro.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11669
11670
/**
 * @opcode 0xff
 *
 * Group 5: dispatches on the reg field of the ModR/M byte to
 * inc / dec / calln / callf / jmpn / jmpf / push; /7 raises #UD.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3); /* reg field is 3 bits; the switch above is exhaustive */
}
11701
11702
11703
/**
 * The one-byte opcode dispatch table: one decoder function per opcode
 * byte 0x00..0xff, indexed directly by the opcode byte.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex2,  iemOp_lds_Gv_Mp__vex3,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_eAX_DX,           iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
11771
11772
11773/** @} */
11774
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette