VirtualBox

source: vbox/trunk/src/VBox/VMM/include/IEMOpHlp.h@ 100725

Last change on this file since 100725 was 100714, checked in by vboxsync, 16 months ago

VMM/IEM: Require an IEMOP_HLP_DONE_DECODING in all MC blocks so we know exactly when the recompiler starts emitting code (calls) and we can make sure it's still safe to restart instruction decoding. Also made the Python script check this and that nothing that smells like decoding happens after IEMOP_HLP_DONE_DECODING and its friends. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 23.0 KB
Line 
1/* $Id: IEMOpHlp.h 100714 2023-07-27 10:12:09Z vboxsync $ */
2/** @file
3 * IEM - Interpreted Execution Manager - Opcode Helpers.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
#ifndef VMM_INCLUDED_SRC_include_IEMOpHlp_h
#define VMM_INCLUDED_SRC_include_IEMOpHlp_h
#ifndef RT_WITHOUT_PRAGMA_ONCE
# pragma once
#endif

/** @name Common opcode decoders.
 * @{
 */
/** Logs details about a stubbed opcode; called by IEMOP_BITCH_ABOUT_STUB() below. */
void iemOpStubMsg2(PVMCPUCC pVCpu) RT_NOEXCEPT;
38
/**
 * Complains about a stub.
 *
 * Providing two versions of this macro, one for daily use and one for use when
 * working on IEM: the @c \#if 0 variant asserts/panics on the spot, the default
 * variant merely logs the stubbed function.
 *
 * @note Neither variant ends in a semicolon, so the caller supplies it
 *       (IEMOP_BITCH_ABOUT_STUB();) — previously the Log() variant carried a
 *       trailing semicolon, producing an empty extra statement at each use
 *       site and breaking use in un-braced if/else bodies.
 */
#if 0
# define IEMOP_BITCH_ABOUT_STUB() \
    do { \
        RTAssertMsg1(NULL, __LINE__, __FILE__, __FUNCTION__); \
        iemOpStubMsg2(pVCpu); \
        RTAssertPanic(); \
    } while (0)
#else
# define IEMOP_BITCH_ABOUT_STUB() Log(("Stub: %s (line %d)\n", __FUNCTION__, __LINE__))
#endif
55
/** Stubs an opcode: defines a decoder function that logs/asserts via
 * IEMOP_BITCH_ABOUT_STUB() and returns VERR_IEM_INSTR_NOT_IMPLEMENTED.
 * The trailing @c typedef exists only to swallow the semicolon the user
 * writes after the macro invocation. */
#define FNIEMOP_STUB(a_Name) \
    FNIEMOP_DEF(a_Name) \
    { \
        RT_NOREF_PV(pVCpu); \
        IEMOP_BITCH_ABOUT_STUB(); \
        return VERR_IEM_INSTR_NOT_IMPLEMENTED; \
    } \
    typedef int ignore_semicolon

/** Stubs an opcode taking one extra decoder argument (same semantics as
 * FNIEMOP_STUB). */
#define FNIEMOP_STUB_1(a_Name, a_Type0, a_Name0) \
    FNIEMOP_DEF_1(a_Name, a_Type0, a_Name0) \
    { \
        RT_NOREF_PV(pVCpu); \
        RT_NOREF_PV(a_Name0); \
        IEMOP_BITCH_ABOUT_STUB(); \
        return VERR_IEM_INSTR_NOT_IMPLEMENTED; \
    } \
    typedef int ignore_semicolon

/** Stubs an opcode which currently should raise \#UD. */
#define FNIEMOP_UD_STUB(a_Name) \
    FNIEMOP_DEF(a_Name) \
    { \
        Log(("Unsupported instruction %Rfn\n", __FUNCTION__)); \
        IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } \
    typedef int ignore_semicolon

/** Stubs an opcode with one extra decoder argument which currently should
 * raise \#UD. */
#define FNIEMOP_UD_STUB_1(a_Name, a_Type0, a_Name0) \
    FNIEMOP_DEF_1(a_Name, a_Type0, a_Name0) \
    { \
        RT_NOREF_PV(pVCpu); \
        RT_NOREF_PV(a_Name0); \
        Log(("Unsupported instruction %Rfn\n", __FUNCTION__)); \
        IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } \
    typedef int ignore_semicolon

/** @} */
98
99
/** @name Opcode Debug Helpers.
 * @{
 */
#ifdef VBOX_WITH_STATISTICS
/** Increments the per-VCpu statistics counter @a a_Stats; the counter lives in
 * StatsR3 or StatsRZ depending on the context this is compiled for. */
# ifdef IN_RING3
#  define IEMOP_INC_STATS(a_Stats) do { pVCpu->iem.s.StatsR3.a_Stats += 1; } while (0)
# else
#  define IEMOP_INC_STATS(a_Stats) do { pVCpu->iem.s.StatsRZ.a_Stats += 1; } while (0)
# endif
#else
/** No-op when statistics are compiled out. */
# define IEMOP_INC_STATS(a_Stats) do { } while (0)
#endif
112
#ifdef DEBUG
/** Bumps the statistics counter for @a a_Stats and logs the decoded mnemonic
 * together with CS:RIP, any lock prefix and the instruction count. */
# define IEMOP_MNEMONIC(a_Stats, a_szMnemonic) \
    do { \
        IEMOP_INC_STATS(a_Stats); \
        Log4(("decode - %04x:%RGv %s%s [#%u]\n", pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip, \
              pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK ? "lock " : "", a_szMnemonic, pVCpu->iem.s.cInstructions)); \
    } while (0)

/** As IEMOP_MNEMONIC for an instruction with no operands.  The (void)
 * references produce no code; they only verify at compile time that the
 * IEMOPFORM_ / OP_ constants and the hint expressions are valid. */
# define IEMOP_MNEMONIC0EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

/** As IEMOP_MNEMONIC0EX, additionally validating the OP_PARM_ constant for
 * one operand. */
# define IEMOP_MNEMONIC1EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)RT_CONCAT(OP_PARM_,a_Op1); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

/** As IEMOP_MNEMONIC0EX, validating two operand constants. */
# define IEMOP_MNEMONIC2EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)RT_CONCAT(OP_PARM_,a_Op1); \
        (void)RT_CONCAT(OP_PARM_,a_Op2); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

/** As IEMOP_MNEMONIC0EX, validating three operand constants. */
# define IEMOP_MNEMONIC3EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)RT_CONCAT(OP_PARM_,a_Op1); \
        (void)RT_CONCAT(OP_PARM_,a_Op2); \
        (void)RT_CONCAT(OP_PARM_,a_Op3); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

/** As IEMOP_MNEMONIC0EX, validating four operand constants. */
# define IEMOP_MNEMONIC4EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)RT_CONCAT(OP_PARM_,a_Op1); \
        (void)RT_CONCAT(OP_PARM_,a_Op2); \
        (void)RT_CONCAT(OP_PARM_,a_Op3); \
        (void)RT_CONCAT(OP_PARM_,a_Op4); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

#else  /* !DEBUG: only the statistics counting remains. */
# define IEMOP_MNEMONIC(a_Stats, a_szMnemonic) IEMOP_INC_STATS(a_Stats)

# define IEMOP_MNEMONIC0EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
# define IEMOP_MNEMONIC1EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
# define IEMOP_MNEMONIC2EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
# define IEMOP_MNEMONIC3EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
# define IEMOP_MNEMONIC4EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)

#endif
191
/** Convenience wrappers around IEMOP_MNEMONIC0EX..4EX that derive the
 * statistics member name (a_Lower with operands appended, joined by '_') and
 * the mnemonic string from the macro arguments via stringification. */
#define IEMOP_MNEMONIC0(a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC0EX(a_Lower, \
                      #a_Lower, \
                      a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints)
#define IEMOP_MNEMONIC1(a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC1EX(RT_CONCAT3(a_Lower,_,a_Op1), \
                      #a_Lower " " #a_Op1, \
                      a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints)
#define IEMOP_MNEMONIC2(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC2EX(RT_CONCAT5(a_Lower,_,a_Op1,_,a_Op2), \
                      #a_Lower " " #a_Op1 "," #a_Op2, \
                      a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints)
#define IEMOP_MNEMONIC3(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC3EX(RT_CONCAT7(a_Lower,_,a_Op1,_,a_Op2,_,a_Op3), \
                      #a_Lower " " #a_Op1 "," #a_Op2 "," #a_Op3, \
                      a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints)
#define IEMOP_MNEMONIC4(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC4EX(RT_CONCAT9(a_Lower,_,a_Op1,_,a_Op2,_,a_Op3,_,a_Op4), \
                      #a_Lower " " #a_Op1 "," #a_Op2 "," #a_Op3 "," #a_Op4, \
                      a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints)

/** @} */
214
215
/** @name Opcode Helpers.
 * @{
 */

#ifdef IN_RING3
/** Raises \#UD unless the target CPU is at least @a a_uMinCpu or @a a_fOnlyIf
 * is false.  The ring-3 variant additionally hits a DBGF stop point before
 * raising the exception so the condition can be inspected in a debugger. */
# define IEMOP_HLP_MIN_CPU(a_uMinCpu, a_fOnlyIf) \
    do { \
        if (IEM_GET_TARGET_CPU(pVCpu) >= (a_uMinCpu) || !(a_fOnlyIf)) { } \
        else \
        { \
            (void)DBGFSTOP(pVCpu->CTX_SUFF(pVM)); \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
        } \
    } while (0)
#else
# define IEMOP_HLP_MIN_CPU(a_uMinCpu, a_fOnlyIf) \
    do { \
        if (IEM_GET_TARGET_CPU(pVCpu) >= (a_uMinCpu) || !(a_fOnlyIf)) { } \
        else IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)
#endif

/* For each minimum-CPU helper below: when the compile-time configured target
   CPU (IEM_CFG_TARGET_CPU) already satisfies the requirement, the check
   compiles away entirely; otherwise it falls back to the runtime check. */

/** The instruction requires a 186 or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_186
# define IEMOP_HLP_MIN_186() do { } while (0)
#else
# define IEMOP_HLP_MIN_186() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_186, true)
#endif

/** The instruction requires a 286 or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_286
# define IEMOP_HLP_MIN_286() do { } while (0)
#else
# define IEMOP_HLP_MIN_286() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_286, true)
#endif

/** The instruction requires a 386 or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_386
# define IEMOP_HLP_MIN_386() do { } while (0)
#else
# define IEMOP_HLP_MIN_386() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_386, true)
#endif

/** The instruction requires a 386 or later if the given expression is true. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_386
# define IEMOP_HLP_MIN_386_EX(a_fOnlyIf) do { } while (0)
#else
# define IEMOP_HLP_MIN_386_EX(a_fOnlyIf) IEMOP_HLP_MIN_CPU(IEMTARGETCPU_386, a_fOnlyIf)
#endif

/** The instruction requires a 486 or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_486
# define IEMOP_HLP_MIN_486() do { } while (0)
#else
# define IEMOP_HLP_MIN_486() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_486, true)
#endif

/** The instruction requires a Pentium (586) or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_PENTIUM
# define IEMOP_HLP_MIN_586() do { } while (0)
#else
# define IEMOP_HLP_MIN_586() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_PENTIUM, true)
#endif

/** The instruction requires a PentiumPro (686) or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_PPRO
# define IEMOP_HLP_MIN_686() do { } while (0)
#else
# define IEMOP_HLP_MIN_686() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_PPRO, true)
#endif
286
287
/** The instruction raises an \#UD in real and V8086 mode. */
#define IEMOP_HLP_NO_REAL_OR_V86_MODE() \
    do \
    { \
        if (!IEM_IS_REAL_OR_V86_MODE(pVCpu)) { /* likely */ } \
        else IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)
295
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
/** This instruction raises an \#UD in real and V8086 mode or when not using a
 * 64-bit code segment when in long mode (applicable to all VMX instructions
 * except VMCALL).
 *
 * Before raising \#UD, the offending condition is recorded in the VMX
 * diagnostic field (hwvirt.vmx.enmDiag) using the @a a_InsDiagPrefix
 * token pasted with the condition suffix.
 */
#define IEMOP_HLP_VMX_INSTR(a_szInstr, a_InsDiagPrefix) \
    do \
    { \
        if (   !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
            && (   !IEM_IS_LONG_MODE(pVCpu) \
                || IEM_IS_64BIT_CODE(pVCpu))) \
        { /* likely */ } \
        else \
        { \
            if (IEM_IS_REAL_OR_V86_MODE(pVCpu)) \
            { \
                pVCpu->cpum.GstCtx.hwvirt.vmx.enmDiag = a_InsDiagPrefix##_RealOrV86Mode; \
                Log5((a_szInstr ": Real or v8086 mode -> #UD\n")); \
                IEMOP_RAISE_INVALID_OPCODE_RET(); \
            } \
            if (IEM_IS_LONG_MODE(pVCpu) && !IEM_IS_64BIT_CODE(pVCpu)) \
            { \
                pVCpu->cpum.GstCtx.hwvirt.vmx.enmDiag = a_InsDiagPrefix##_LongModeCS; \
                Log5((a_szInstr ": Long mode without 64-bit code segment -> #UD\n")); \
                IEMOP_RAISE_INVALID_OPCODE_RET(); \
            } \
        } \
    } while (0)

/** The instruction can only be executed in VMX operation (VMX root mode and
 * non-root mode).
 *
 * @note Update IEM_VMX_IN_VMX_OPERATION if changes are made here.
 *
 * @todo r=bird: This is absolutely *INCORRECT* since IEM_VMX_IS_ROOT_MODE
 *       is a complicated runtime state (calls CPUMIsGuestInVmxRootMode), and
 *       not something we can decide while decoding.  Convert to an IEM_MC!
 */
# define IEMOP_HLP_IN_VMX_OPERATION(a_szInstr, a_InsDiagPrefix) \
    do \
    { \
        if (IEM_VMX_IS_ROOT_MODE(pVCpu)) { /* likely */ } \
        else \
        { \
            pVCpu->cpum.GstCtx.hwvirt.vmx.enmDiag = a_InsDiagPrefix##_VmxRoot; \
            Log5((a_szInstr ": Not in VMX operation (root mode) -> #UD\n")); \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
        } \
    } while (0)
#endif /* VBOX_WITH_NESTED_HWVIRT_VMX */
346
/** The instruction is not available in 64-bit mode, throw \#UD if we're in
 * 64-bit mode. */
#define IEMOP_HLP_NO_64BIT() \
    do \
    { \
        if (!IEM_IS_64BIT_CODE(pVCpu)) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/** The instruction is only available in 64-bit mode, throw \#UD if we're not in
 * 64-bit mode. */
#define IEMOP_HLP_ONLY_64BIT() \
    do \
    { \
        if (IEM_IS_64BIT_CODE(pVCpu)) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/** The instruction defaults to 64-bit operand size if 64-bit mode. */
#define IEMOP_HLP_DEFAULT_64BIT_OP_SIZE() \
    do \
    { \
        if (IEM_IS_64BIT_CODE(pVCpu)) \
            iemRecalEffOpSize64Default(pVCpu); \
    } while (0)

/** The instruction defaults to 64-bit operand size if 64-bit mode and intel
 * CPUs ignore the operand size prefix completely (e.g. relative jumps). */
#define IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX() \
    do \
    { \
        if (IEM_IS_64BIT_CODE(pVCpu)) \
            iemRecalEffOpSize64DefaultAndIntelIgnoresOpSizePrefix(pVCpu); \
    } while (0)

/** The instruction has 64-bit operand size if 64-bit mode. */
#define IEMOP_HLP_64BIT_OP_SIZE() \
    do \
    { \
        if (IEM_IS_64BIT_CODE(pVCpu)) \
            pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT; \
    } while (0)
393
/** Only a REX prefix immediately preceding the first opcode byte takes
 * effect.  This macro helps ensuring this as well as logging bad guest code.
 * It clears all decoded REX state and recalculates the effective operand
 * size when an overridden REX prefix is detected. */
#define IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE(a_szPrf) \
    do \
    { \
        if (RT_UNLIKELY(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX)) \
        { \
            /* %RGv matches the 64-bit guest RIP; the old %RX16 specifier truncated it. */ \
            Log5((a_szPrf ": Overriding REX prefix at %RGv! fPrefixes=%#x\n", pVCpu->cpum.GstCtx.rip, pVCpu->iem.s.fPrefixes)); \
            pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REX_MASK; \
            pVCpu->iem.s.uRexB     = 0; \
            pVCpu->iem.s.uRexIndex = 0; \
            pVCpu->iem.s.uRexReg   = 0; \
            iemRecalEffOpSize(pVCpu); \
        } \
    } while (0)
409
/**
 * Done decoding.
 */
#define IEMOP_HLP_DONE_DECODING() \
    do \
    { \
        /*nothing for now, maybe later... */ \
    } while (0)

/**
 * Done decoding, raise \#UD exception if lock prefix present.
 */
#define IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
    } while (0)

/**
 * Done decoding, raise \#UD exception if lock prefix present or if
 * @a a_fFeature is NOT present in the guest CPU.
 *
 * @note Both failure conditions take the invalid-lock-prefix path.
 */
#define IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature)) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
    } while (0)

/**
 * Done decoding, raise \#UD exception if lock prefix present or if NEITHER
 * @a a_fFeature1 nor @a a_fFeature2 is present in the guest CPU.
 *
 * @note Both failure conditions take the invalid-lock-prefix path.
 */
#define IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(a_fFeature1, a_fFeature2) \
    do \
    { \
        if (RT_LIKELY(   !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) \
                      && (   IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature1 \
                          || IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature2) )) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
    } while (0)
459
460
/**
 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
 * repnz or size prefixes are present, if in real or v8086 mode, or if the
 * a_fFeature is not present in the guest CPU.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature)) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/**
 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
 * repnz or size prefixes are present, if in real or v8086 mode, if VEX.L is
 * not 0, or if the a_fFeature is not present in the guest CPU.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && pVCpu->iem.s.uVexLength == 0 \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature)) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/**
 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
 * repnz or size prefixes are present, if in real or v8086 mode, if VEX.L is
 * not 1, or if the a_fFeature is not present in the guest CPU.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_L1_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && pVCpu->iem.s.uVexLength == 1 \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature)) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)
513
/**
 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
 * repnz or size prefixes are present, or if the VEX.VVVV field doesn't indicate
 * register 0, if in real or v8086 mode, or if the a_fFeature is not present in
 * the guest CPU.
 *
 * @note uVex3rdReg holds the decoded (inverted) VVVV value, so 0 here
 *       corresponds to an encoded VEX.VVVV of 0xf.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
                      && !pVCpu->iem.s.uVex3rdReg \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/**
 * Done decoding VEX, no V, L=0.
 * Raises \#UD exception if rex, rep, opsize or lock prefixes are present, if
 * we're in real or v8086 mode, if VEX.V!=0xf, if VEX.L!=0, or if the a_fFeature
 * is not present in the guest CPU.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REX)) \
                      && pVCpu->iem.s.uVexLength == 0 \
                      && pVCpu->iem.s.uVex3rdReg == 0 \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/**
 * Done decoding VEX, no V, L=1.
 * Raises \#UD exception if rex, rep, opsize or lock prefixes are present, if
 * we're in real or v8086 mode, if VEX.V!=0xf, if VEX.L!=1, or if the a_fFeature
 * is not present in the guest CPU.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REX)) \
                      && pVCpu->iem.s.uVexLength == 1 \
                      && pVCpu->iem.s.uVex3rdReg == 0 \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)
572
/** Done-decoding check raising \#UD (invalid lock prefix) if the lock prefix
 * is present.  The disassembler-related parameters carry no runtime meaning
 * here; they are merely NOREF'ed on the failure path.  One-parameter form. */
#define IEMOP_HLP_DECODED_NL_1(a_uDisOpNo, a_fIemOpFlags, a_uDisParam0, a_fDisOpType) \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))) \
        { /* likely */ } \
        else \
        { \
            NOREF(a_uDisOpNo); NOREF(a_fIemOpFlags); NOREF(a_uDisParam0); NOREF(a_fDisOpType); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } while (0)
/** As IEMOP_HLP_DECODED_NL_1, two-parameter form. */
#define IEMOP_HLP_DECODED_NL_2(a_uDisOpNo, a_fIemOpFlags, a_uDisParam0, a_uDisParam1, a_fDisOpType) \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))) \
        { /* likely */ } \
        else \
        { \
            NOREF(a_uDisOpNo); NOREF(a_fIemOpFlags); NOREF(a_uDisParam0); NOREF(a_uDisParam1); NOREF(a_fDisOpType); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } while (0)
595
/**
 * Done decoding, raise \#UD exception if any lock, repz or repnz prefixes
 * are present.
 */
#define IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES() \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)))) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/**
 * Done decoding, raise \#UD exception if any operand-size override, repz or repnz
 * prefixes are present.
 */
#define IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES() \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)))) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/**
 * Check for a CPUMFEATURES member to be true, raise \#UD if clear.
 */
#define IEMOP_HLP_RAISE_UD_IF_MISSING_GUEST_FEATURE(pVCpu, a_fFeature) \
    do \
    { \
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)
633
/** Calculates the effective address of a ModR/M memory operand (bRm) and
 * returns it via @a pGCPtrEff; the Ex variant additionally returns decoding
 * info via @a puInfo.  The Jmp variants longjmp on failure instead of
 * returning a status code. */
VBOXSTRICTRC iemOpHlpCalcRmEffAddr(PVMCPUCC pVCpu, uint8_t bRm, uint32_t cbImmAndRspOffset, PRTGCPTR pGCPtrEff) RT_NOEXCEPT;
VBOXSTRICTRC iemOpHlpCalcRmEffAddrEx(PVMCPUCC pVCpu, uint8_t bRm, uint32_t cbImmAndRspOffset, PRTGCPTR pGCPtrEff, uint64_t *puInfo) RT_NOEXCEPT;
#ifdef IEM_WITH_SETJMP
RTGCPTR iemOpHlpCalcRmEffAddrJmp(PVMCPUCC pVCpu, uint8_t bRm, uint32_t cbImmAndRspOffset) IEM_NOEXCEPT_MAY_LONGJMP;
RTGCPTR iemOpHlpCalcRmEffAddrJmpEx(PVMCPUCC pVCpu, uint8_t bRm, uint32_t cbImmAndRspOffset, uint64_t *puInfo) IEM_NOEXCEPT_MAY_LONGJMP;
#endif

/** @} */

#endif /* !VMM_INCLUDED_SRC_include_IEMOpHlp_h */
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette