VirtualBox

source: vbox/trunk/src/VBox/VMM/include/IEMOpHlp.h@ 95112

Last change on this file since 95112 was 94768, checked in by vboxsync, 3 years ago

VMM/IEM: Split up IEMAll.cpp into a few more compilation units. bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 18.5 KB
Line 
1/* $Id: IEMOpHlp.h 94768 2022-05-01 22:02:17Z vboxsync $ */
2/** @file
3 * IEM - Interpreted Execution Manager - Opcode Helpers.
4 */
5
6/*
7 * Copyright (C) 2011-2022 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18#ifndef VMM_INCLUDED_SRC_include_IEMOpHlp_h
19#define VMM_INCLUDED_SRC_include_IEMOpHlp_h
20#ifndef RT_WITHOUT_PRAGMA_ONCE
21# pragma once
22#endif
23
/** @name Common opcode decoders.
 * @{
 */
/** Emits the detailed part of a stub complaint for the given vCPU (second
 *  message line; RTAssertMsg1 in IEMOP_BITCH_ABOUT_STUB provides the first). */
void iemOpStubMsg2(PVMCPUCC pVCpu) RT_NOEXCEPT;
/**
 * Complains about a stub.
 *
 * Providing two versions of this macro, one for daily use and one for use when
 * working on IEM.
 */
#if 0
# define IEMOP_BITCH_ABOUT_STUB() \
    do { \
        RTAssertMsg1(NULL, __LINE__, __FILE__, __FUNCTION__); \
        iemOpStubMsg2(pVCpu); \
        RTAssertPanic(); \
    } while (0)
#else
/* Wrapped in do/while(0) without a trailing semicolon so both variants expand
   to exactly one statement at the invocation site (the original bare form had
   a stray trailing ';', yielding an empty extra statement and breaking use in
   an unbraced if/else). */
# define IEMOP_BITCH_ABOUT_STUB() \
    do { \
        Log(("Stub: %s (line %d)\n", __FUNCTION__, __LINE__)); \
    } while (0)
#endif
45
/** Stubs an opcode decoder function: defines a_Name so it complains (log or
 *  assert, see IEMOP_BITCH_ABOUT_STUB) and returns
 *  VERR_IEM_INSTR_NOT_IMPLEMENTED.  The trailing typedef consumes the
 *  semicolon at the expansion site. */
#define FNIEMOP_STUB(a_Name) \
    FNIEMOP_DEF(a_Name) \
    { \
        RT_NOREF_PV(pVCpu); \
        IEMOP_BITCH_ABOUT_STUB(); \
        return VERR_IEM_INSTR_NOT_IMPLEMENTED; \
    } \
    typedef int ignore_semicolon
55
/** Stubs an opcode decoder function taking one extra argument; same behaviour
 *  as FNIEMOP_STUB (complain, return VERR_IEM_INSTR_NOT_IMPLEMENTED). */
#define FNIEMOP_STUB_1(a_Name, a_Type0, a_Name0) \
    FNIEMOP_DEF_1(a_Name, a_Type0, a_Name0) \
    { \
        RT_NOREF_PV(pVCpu); \
        RT_NOREF_PV(a_Name0); \
        IEMOP_BITCH_ABOUT_STUB(); \
        return VERR_IEM_INSTR_NOT_IMPLEMENTED; \
    } \
    typedef int ignore_semicolon
66
/** Stubs an opcode which currently should raise \#UD: defines a_Name so it
 *  logs the unsupported instruction and raises an invalid-opcode exception. */
#define FNIEMOP_UD_STUB(a_Name) \
    FNIEMOP_DEF(a_Name) \
    { \
        Log(("Unsupported instruction %Rfn\n", __FUNCTION__)); \
        return IEMOP_RAISE_INVALID_OPCODE(); \
    } \
    typedef int ignore_semicolon
75
/** Stubs an opcode which currently should raise \#UD, one extra argument
 *  variant of FNIEMOP_UD_STUB. */
#define FNIEMOP_UD_STUB_1(a_Name, a_Type0, a_Name0) \
    FNIEMOP_DEF_1(a_Name, a_Type0, a_Name0) \
    { \
        RT_NOREF_PV(pVCpu); \
        RT_NOREF_PV(a_Name0); \
        Log(("Unsupported instruction %Rfn\n", __FUNCTION__)); \
        return IEMOP_RAISE_INVALID_OPCODE(); \
    } \
    typedef int ignore_semicolon
86
87/** @} */
88
89
90/** @name Opcode Debug Helpers.
91 * @{
92 */
/** Increments the named per-instruction statistics counter (no-op unless
 *  VBOX_WITH_STATISTICS; ring-3 and ring-0/raw-mode use separate counter
 *  sets, StatsR3 vs StatsRZ). */
#ifdef VBOX_WITH_STATISTICS
# ifdef IN_RING3
#  define IEMOP_INC_STATS(a_Stats) do { pVCpu->iem.s.StatsR3.a_Stats += 1; } while (0)
# else
#  define IEMOP_INC_STATS(a_Stats) do { pVCpu->iem.s.StatsRZ.a_Stats += 1; } while (0)
# endif
#else
# define IEMOP_INC_STATS(a_Stats) do { } while (0)
#endif
102
#ifdef DEBUG
/** Bumps the instruction's statistics counter and (debug builds only) logs
 *  the CS:RIP, an optional "lock " prefix indicator, the mnemonic text, and
 *  the instruction count. */
# define IEMOP_MNEMONIC(a_Stats, a_szMnemonic) \
    do { \
        IEMOP_INC_STATS(a_Stats); \
        Log4(("decode - %04x:%RGv %s%s [#%u]\n", pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip, \
              pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK ? "lock " : "", a_szMnemonic, pVCpu->iem.s.cInstructions)); \
    } while (0)

/** IEMOP_MNEMONIC for an instruction with no operands.  The (void) casts do
 *  nothing at runtime; they exist so the compiler validates that the
 *  IEMOPFORM_/OP_ identifiers and the hint expressions are well-formed. */
# define IEMOP_MNEMONIC0EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

/** IEMOP_MNEMONIC for an instruction with one operand; additionally
 *  compile-checks the OP_PARM_ identifier for the operand. */
# define IEMOP_MNEMONIC1EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)RT_CONCAT(OP_PARM_,a_Op1); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

/** IEMOP_MNEMONIC for an instruction with two operands. */
# define IEMOP_MNEMONIC2EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)RT_CONCAT(OP_PARM_,a_Op1); \
        (void)RT_CONCAT(OP_PARM_,a_Op2); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

/** IEMOP_MNEMONIC for an instruction with three operands. */
# define IEMOP_MNEMONIC3EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)RT_CONCAT(OP_PARM_,a_Op1); \
        (void)RT_CONCAT(OP_PARM_,a_Op2); \
        (void)RT_CONCAT(OP_PARM_,a_Op3); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

/** IEMOP_MNEMONIC for an instruction with four operands. */
# define IEMOP_MNEMONIC4EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)RT_CONCAT(OP_PARM_,a_Op1); \
        (void)RT_CONCAT(OP_PARM_,a_Op2); \
        (void)RT_CONCAT(OP_PARM_,a_Op3); \
        (void)RT_CONCAT(OP_PARM_,a_Op4); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

#else  /* !DEBUG: only the statistics counter is touched; logging and the
          compile-time identifier checks are dropped. */
# define IEMOP_MNEMONIC(a_Stats, a_szMnemonic) IEMOP_INC_STATS(a_Stats)

# define IEMOP_MNEMONIC0EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
# define IEMOP_MNEMONIC1EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
# define IEMOP_MNEMONIC2EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
# define IEMOP_MNEMONIC3EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
# define IEMOP_MNEMONIC4EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)

#endif
181
/** Convenience wrappers around IEMOP_MNEMONIC[0-4]EX: the statistics member
 *  name is built by concatenating the lowercase mnemonic and the operand form
 *  tokens with underscores, and the log string by stringizing them. */
#define IEMOP_MNEMONIC0(a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC0EX(a_Lower, \
                      #a_Lower, \
                      a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints)
#define IEMOP_MNEMONIC1(a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC1EX(RT_CONCAT3(a_Lower,_,a_Op1), \
                      #a_Lower " " #a_Op1, \
                      a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints)
#define IEMOP_MNEMONIC2(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC2EX(RT_CONCAT5(a_Lower,_,a_Op1,_,a_Op2), \
                      #a_Lower " " #a_Op1 "," #a_Op2, \
                      a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints)
#define IEMOP_MNEMONIC3(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC3EX(RT_CONCAT7(a_Lower,_,a_Op1,_,a_Op2,_,a_Op3), \
                      #a_Lower " " #a_Op1 "," #a_Op2 "," #a_Op3, \
                      a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints)
#define IEMOP_MNEMONIC4(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC4EX(RT_CONCAT9(a_Lower,_,a_Op1,_,a_Op2,_,a_Op3,_,a_Op4), \
                      #a_Lower " " #a_Op1 "," #a_Op2 "," #a_Op3 "," #a_Op4, \
                      a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints)
202
203/** @} */
204
205
206/** @name Opcode Helpers.
207 * @{
208 */
209
/** Raises \#UD (by returning from the enclosing decoder function) when the
 *  target CPU is older than a_uMinCpu and a_fOnlyIf is true.  In ring-3 it
 *  additionally triggers a debugger stop (DBGFSTOP) before raising. */
#ifdef IN_RING3
# define IEMOP_HLP_MIN_CPU(a_uMinCpu, a_fOnlyIf) \
    do { \
        if (IEM_GET_TARGET_CPU(pVCpu) >= (a_uMinCpu) || !(a_fOnlyIf)) { } \
        else \
        { \
            (void)DBGFSTOP(pVCpu->CTX_SUFF(pVM)); \
            return IEMOP_RAISE_INVALID_OPCODE(); \
        } \
    } while (0)
#else
# define IEMOP_HLP_MIN_CPU(a_uMinCpu, a_fOnlyIf) \
    do { \
        if (IEM_GET_TARGET_CPU(pVCpu) >= (a_uMinCpu) || !(a_fOnlyIf)) { } \
        else return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
#endif
227
/* Per-generation minimum-CPU checks.  Each compiles to a no-op when the
   build-time IEM_CFG_TARGET_CPU already guarantees the requirement, otherwise
   it defers to the runtime IEMOP_HLP_MIN_CPU check. */

/** The instruction requires a 186 or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_186
# define IEMOP_HLP_MIN_186() do { } while (0)
#else
# define IEMOP_HLP_MIN_186() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_186, true)
#endif

/** The instruction requires a 286 or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_286
# define IEMOP_HLP_MIN_286() do { } while (0)
#else
# define IEMOP_HLP_MIN_286() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_286, true)
#endif

/** The instruction requires a 386 or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_386
# define IEMOP_HLP_MIN_386() do { } while (0)
#else
# define IEMOP_HLP_MIN_386() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_386, true)
#endif

/** The instruction requires a 386 or later if the given expression is true. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_386
# define IEMOP_HLP_MIN_386_EX(a_fOnlyIf) do { } while (0)
#else
# define IEMOP_HLP_MIN_386_EX(a_fOnlyIf) IEMOP_HLP_MIN_CPU(IEMTARGETCPU_386, a_fOnlyIf)
#endif

/** The instruction requires a 486 or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_486
# define IEMOP_HLP_MIN_486() do { } while (0)
#else
# define IEMOP_HLP_MIN_486() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_486, true)
#endif

/** The instruction requires a Pentium (586) or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_PENTIUM
# define IEMOP_HLP_MIN_586() do { } while (0)
#else
# define IEMOP_HLP_MIN_586() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_PENTIUM, true)
#endif

/** The instruction requires a PentiumPro (686) or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_PPRO
# define IEMOP_HLP_MIN_686() do { } while (0)
#else
# define IEMOP_HLP_MIN_686() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_PPRO, true)
#endif
276
277
/** The instruction raises an \#UD in real and V8086 mode. */
#define IEMOP_HLP_NO_REAL_OR_V86_MODE() \
    do \
    { \
        if (!IEM_IS_REAL_OR_V86_MODE(pVCpu)) { /* likely */ } \
        else return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
285
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
/** This instruction raises an \#UD in real and V8086 mode or when not using a
 * 64-bit code segment when in long mode (applicable to all VMX instructions
 * except VMCALL).
 *
 * Before raising \#UD the failing condition is recorded in the VMX diagnostic
 * field (a_InsDiagPrefix##_RealOrV86Mode / a_InsDiagPrefix##_LongModeCS) and
 * logged at level 5.
 */
#define IEMOP_HLP_VMX_INSTR(a_szInstr, a_InsDiagPrefix) \
    do \
    { \
        if (   !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
            && (   !IEM_IS_LONG_MODE(pVCpu) \
                || IEM_IS_64BIT_CODE(pVCpu))) \
        { /* likely */ } \
        else \
        { \
            if (IEM_IS_REAL_OR_V86_MODE(pVCpu)) \
            { \
                pVCpu->cpum.GstCtx.hwvirt.vmx.enmDiag = a_InsDiagPrefix##_RealOrV86Mode; \
                Log5((a_szInstr ": Real or v8086 mode -> #UD\n")); \
                return IEMOP_RAISE_INVALID_OPCODE(); \
            } \
            if (IEM_IS_LONG_MODE(pVCpu) && !IEM_IS_64BIT_CODE(pVCpu)) \
            { \
                pVCpu->cpum.GstCtx.hwvirt.vmx.enmDiag = a_InsDiagPrefix##_LongModeCS; \
                Log5((a_szInstr ": Long mode without 64-bit code segment -> #UD\n")); \
                return IEMOP_RAISE_INVALID_OPCODE(); \
            } \
        } \
    } while (0)

/** The instruction can only be executed in VMX operation (VMX root mode and
 * non-root mode).
 *
 * @note Update IEM_VMX_IN_VMX_OPERATION if changes are made here.
 */
# define IEMOP_HLP_IN_VMX_OPERATION(a_szInstr, a_InsDiagPrefix) \
    do \
    { \
        if (IEM_VMX_IS_ROOT_MODE(pVCpu)) { /* likely */ } \
        else \
        { \
            pVCpu->cpum.GstCtx.hwvirt.vmx.enmDiag = a_InsDiagPrefix##_VmxRoot; \
            Log5((a_szInstr ": Not in VMX operation (root mode) -> #UD\n")); \
            return IEMOP_RAISE_INVALID_OPCODE(); \
        } \
    } while (0)
#endif /* VBOX_WITH_NESTED_HWVIRT_VMX */
332
/** The instruction is not available in 64-bit mode, throw \#UD if we're in
 * 64-bit mode. */
#define IEMOP_HLP_NO_64BIT() \
    do \
    { \
        if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT) \
            return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
341
/** The instruction is only available in 64-bit mode, throw \#UD if we're not in
 * 64-bit mode. */
#define IEMOP_HLP_ONLY_64BIT() \
    do \
    { \
        if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT) \
            return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
350
/** The instruction defaults to 64-bit operand size if 64-bit mode
 *  (recalculated via iemRecalEffOpSize64Default, so prefixes still apply). */
#define IEMOP_HLP_DEFAULT_64BIT_OP_SIZE() \
    do \
    { \
        if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT) \
            iemRecalEffOpSize64Default(pVCpu); \
    } while (0)
358
/** The instruction has 64-bit operand size if 64-bit mode (forces both the
 *  default and effective operand size, unlike IEMOP_HLP_DEFAULT_64BIT_OP_SIZE). */
#define IEMOP_HLP_64BIT_OP_SIZE() \
    do \
    { \
        if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT) \
            pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT; \
    } while (0)
366
/** Only a REX prefix immediately preceding the first opcode byte takes
 * effect. This macro helps ensuring this as well as logging bad guest code
 * (clears all REX state and recalculates the effective operand size). */
#define IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE(a_szPrf) \
    do \
    { \
        if (RT_UNLIKELY(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX)) \
        { \
            Log5((a_szPrf ": Overriding REX prefix at %RX16! fPrefixes=%#x\n", pVCpu->cpum.GstCtx.rip, pVCpu->iem.s.fPrefixes)); \
            pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REX_MASK; \
            pVCpu->iem.s.uRexB     = 0; \
            pVCpu->iem.s.uRexIndex = 0; \
            pVCpu->iem.s.uRexReg   = 0; \
            iemRecalEffOpSize(pVCpu); \
        } \
    } while (0)
382
/**
 * Done decoding.  Currently a placeholder no-op kept so call sites stay in
 * place should post-decode work be needed later.
 */
#define IEMOP_HLP_DONE_DECODING() \
    do \
    { \
        /*nothing for now, maybe later... */ \
    } while (0)
391
/**
 * Done decoding, raise \#UD exception if lock prefix present.
 */
#define IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))) \
        { /* likely */ } \
        else \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
    } while (0)
403
404
405/**
406 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
407 * repnz or size prefixes are present, or if in real or v8086 mode.
408 */
409#define IEMOP_HLP_DONE_VEX_DECODING() \
410 do \
411 { \
412 if (RT_LIKELY( !( pVCpu->iem.s.fPrefixes \
413 & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
414 && !IEM_IS_REAL_OR_V86_MODE(pVCpu) )) \
415 { /* likely */ } \
416 else \
417 return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
418 } while (0)
419
/**
 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
 * repnz or size prefixes are present, if in real or v8086 mode, or if
 * VEX.L is not zero (128-bit operation required).
 */
#define IEMOP_HLP_DONE_VEX_DECODING_L0() \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && pVCpu->iem.s.uVexLength == 0)) \
        { /* likely */ } \
        else \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
    } while (0)
435
436
437/**
438 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
439 * repnz or size prefixes are present, or if the VEX.VVVV field doesn't indicate
440 * register 0, or if in real or v8086 mode.
441 */
442#define IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV() \
443 do \
444 { \
445 if (RT_LIKELY( !( pVCpu->iem.s.fPrefixes \
446 & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
447 && !pVCpu->iem.s.uVex3rdReg \
448 && !IEM_IS_REAL_OR_V86_MODE(pVCpu) )) \
449 { /* likely */ } \
450 else \
451 return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
452 } while (0)
453
/**
 * Done decoding VEX, no V, L=0.
 * Raises \#UD exception if rex, rep, opsize or lock prefixes are present, if
 * we're in real or v8086 mode, if VEX.V!=0xf, or if VEX.L!=0.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV() \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REX)) \
                      && pVCpu->iem.s.uVexLength == 0 \
                      && pVCpu->iem.s.uVex3rdReg == 0 \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu))) \
        { /* likely */ } \
        else \
            return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
471
/** Done decoding (no-lock variant, one disassembler parameter): raises \#UD
 *  if a lock prefix is present; the NOREFs keep the disassembler metadata
 *  arguments from triggering unused warnings on the error path. */
#define IEMOP_HLP_DECODED_NL_1(a_uDisOpNo, a_fIemOpFlags, a_uDisParam0, a_fDisOpType) \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))) \
        { /* likely */ } \
        else \
        { \
            NOREF(a_uDisOpNo); NOREF(a_fIemOpFlags); NOREF(a_uDisParam0); NOREF(a_fDisOpType); \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
        } \
    } while (0)
/** Same as IEMOP_HLP_DECODED_NL_1 but for two disassembler parameters. */
#define IEMOP_HLP_DECODED_NL_2(a_uDisOpNo, a_fIemOpFlags, a_uDisParam0, a_uDisParam1, a_fDisOpType) \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))) \
        { /* likely */ } \
        else \
        { \
            NOREF(a_uDisOpNo); NOREF(a_fIemOpFlags); NOREF(a_uDisParam0); NOREF(a_uDisParam1); NOREF(a_fDisOpType); \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
        } \
    } while (0)
494
/**
 * Done decoding, raise \#UD exception if any lock, repz or repnz prefixes
 * are present.
 */
#define IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES() \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)))) \
        { /* likely */ } \
        else \
            return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
507
/**
 * Done decoding, raise \#UD exception if any operand-size override, repz or repnz
 * prefixes are present.
 */
#define IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES() \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)))) \
        { /* likely */ } \
        else \
            return IEMOP_RAISE_INVALID_OPCODE(); \
    } while (0)
520
/** Calculates the effective address of a ModR/M memory operand, returning it
 *  via pGCPtrEff; bRm is the ModR/M byte and cbImm the number of trailing
 *  immediate bytes (affects RIP-relative addressing — assumption from the
 *  parameter names, bodies not in this file). */
VBOXSTRICTRC iemOpHlpCalcRmEffAddr(PVMCPUCC pVCpu, uint8_t bRm, uint8_t cbImm, PRTGCPTR pGCPtrEff) RT_NOEXCEPT;
/** Extended variant taking an additional RSP displacement (offRsp). */
VBOXSTRICTRC iemOpHlpCalcRmEffAddrEx(PVMCPUCC pVCpu, uint8_t bRm, uint8_t cbImm, PRTGCPTR pGCPtrEff, int8_t offRsp) RT_NOEXCEPT;
#ifdef IEM_WITH_SETJMP
/** Longjmp-on-error variant of iemOpHlpCalcRmEffAddr returning the address
 *  directly (only built with IEM_WITH_SETJMP). */
RTGCPTR iemOpHlpCalcRmEffAddrJmp(PVMCPUCC pVCpu, uint8_t bRm, uint8_t cbImm) RT_NOEXCEPT;
#endif
526
527/** @} */
528
529#endif /* !VMM_INCLUDED_SRC_include_IEMOpHlp_h */
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette