VirtualBox

source: vbox/trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-basic-2-x0.c @ 103005

Last change on this file since 103005 was 103005, checked in by vboxsync, 10 months ago

iprt/asm.h,*: Split out the ASMMem* and related stuff into a separate header, asm-mem.h, so that we can get the RT_ASM_PAGE_SIZE stuff out of the way.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 366.2 KB
1/* $Id: bs3-cpu-basic-2-x0.c 103005 2024-01-23 23:55:58Z vboxsync $ */
2/** @file
3 * BS3Kit - bs3-cpu-basic-2, C test driver code (16-bit).
4 */
5
6/*
7 * Copyright (C) 2007-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * The contents of this file may alternatively be used under the terms
26 * of the Common Development and Distribution License Version 1.0
27 * (CDDL), a copy of it is provided in the "COPYING.CDDL" file included
28 * in the VirtualBox distribution, in which case the provisions of the
29 * CDDL are applicable instead of those of the GPL.
30 *
31 * You may elect to license modified versions of this file under the
32 * terms and conditions of either the GPL or the CDDL or both.
33 *
34 * SPDX-License-Identifier: GPL-3.0-only OR CDDL-1.0
35 */
36
37
38/*********************************************************************************************************************************
39* Header Files *
40*********************************************************************************************************************************/
41#define BS3_USE_X0_TEXT_SEG
42#include <bs3kit.h>
43#include <iprt/asm.h>
44#include <iprt/asm-amd64-x86.h>
45#include <iprt/asm-mem.h>
46
47
48/*********************************************************************************************************************************
49* Defined Constants And Macros *
50*********************************************************************************************************************************/
51#undef CHECK_MEMBER
52#define CHECK_MEMBER(a_szName, a_szFmt, a_Actual, a_Expected) \
53 do \
54 { \
55 if ((a_Actual) == (a_Expected)) { /* likely */ } \
56 else bs3CpuBasic2_FailedF(a_szName "=" a_szFmt " expected " a_szFmt, (a_Actual), (a_Expected)); \
57 } while (0)
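/*
 * Annotation (not part of the original file): CHECK_MEMBER is the building
 * block used by the bs3CpuBasic2_Compare*Ctx helpers further down, e.g.
 *     CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
 * It only calls bs3CpuBasic2_FailedF when the actual value differs from the
 * expected one, so a passing check costs nothing but the comparison.
 */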
58
59
60/** Indicates that we've got an operand size prefix and that it matters. */
61#define BS3CB2SIDTSGDT_F_OPSIZE UINT8_C(0x01)
62/** Worker requires 386 or later. */
63#define BS3CB2SIDTSGDT_F_386PLUS UINT8_C(0x02)
64
65
66/** @name MYOP_XXX - Values for FNBS3CPUBASIC2ACTSTCODE::fOp.
67 *
68 * These are flags, though we've pre-combined a few to shorten things down.
69 *
70 * @{ */
71#define MYOP_LD 0x1 /**< The instruction loads. */
72#define MYOP_ST 0x2 /**< The instruction stores. */
73#define MYOP_EFL 0x4 /**< The instruction modifies EFLAGS. */
74#define MYOP_AC_GP 0x8 /**< The instruction may cause either \#AC or \#GP (FXSAVE). */
75
76#define MYOP_LD_ST 0x3 /**< Convenience: The instruction both loads and stores. */
77#define MYOP_LD_DIV 0x5 /**< Convenience: DIV instruction - loading and modifying flags. */
78/** @} */
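/*
 * Annotation (not part of the original file): the convenience values are
 * plain bitwise combinations of the flags above:
 *     MYOP_LD_ST  == (MYOP_LD | MYOP_ST)  == 0x3
 *     MYOP_LD_DIV == (MYOP_LD | MYOP_EFL) == 0x5
 * so test code can still check the individual MYOP_LD / MYOP_ST / MYOP_EFL
 * bits directly.
 */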
79
80
81/*********************************************************************************************************************************
82* Structures and Typedefs *
83*********************************************************************************************************************************/
84/** Near void pointer. */
85typedef void BS3_NEAR *NPVOID;
86
87typedef struct BS3CB2INVLDESCTYPE
88{
89 uint8_t u4Type;
90 uint8_t u1DescType;
91} BS3CB2INVLDESCTYPE;
92
93typedef struct BS3CB2SIDTSGDT
94{
95 const char *pszDesc; /**< Test description. */
96 FPFNBS3FAR fpfnWorker; /**< The worker code snippet doing the instruction(s) under test. */
97 uint8_t cbInstr; /**< Length of the instruction under test, prefixes included. */
98 bool fSs; /**< Whether the memory operand uses an SS segment override. */
99 uint8_t bMode; /**< BS3_MODE_CODE_XXX mask of code modes the worker applies to. */
100 uint8_t fFlags; /**< BS3CB2SIDTSGDT_F_XXX. */
101} BS3CB2SIDTSGDT;
102
103
104typedef void BS3_CALL FNBS3CPUBASIC2ACSNIPPET(void);
105
106typedef struct FNBS3CPUBASIC2ACTSTCODE
107{
108 FNBS3CPUBASIC2ACSNIPPET BS3_FAR *pfn; /**< The test code snippet. */
109 uint8_t fOp; /**< MYOP_XXX. */
110 uint16_t cbMem; /**< Size of the memory access, in bytes. */
111 uint8_t cbAlign; /**< Natural alignment of the memory access. */
112 uint8_t offFaultInstr; /**< For skipping fninit with the fld test. */
113} FNBS3CPUBASIC2ACTSTCODE;
114typedef FNBS3CPUBASIC2ACTSTCODE const *PCFNBS3CPUBASIC2ACTSTCODE;
115
116typedef struct BS3CPUBASIC2ACTTSTCMNMODE
117{
118 uint8_t bMode;
119 uint16_t cEntries;
120 PCFNBS3CPUBASIC2ACTSTCODE paEntries;
121} BS3CPUBASIC2PFTTSTCMNMODE;
122typedef BS3CPUBASIC2PFTTSTCMNMODE const *PCBS3CPUBASIC2PFTTSTCMNMODE;
123
124
125/*********************************************************************************************************************************
126* External Symbols *
127*********************************************************************************************************************************/
128extern FNBS3FAR bs3CpuBasic2_Int80;
129extern FNBS3FAR bs3CpuBasic2_Int81;
130extern FNBS3FAR bs3CpuBasic2_Int82;
131extern FNBS3FAR bs3CpuBasic2_Int83;
132
133extern FNBS3FAR bs3CpuBasic2_ud2;
134#define g_bs3CpuBasic2_ud2_FlatAddr BS3_DATA_NM(g_bs3CpuBasic2_ud2_FlatAddr)
135extern uint32_t g_bs3CpuBasic2_ud2_FlatAddr;
136
137extern FNBS3FAR bs3CpuBasic2_salc_ud2;
138extern FNBS3FAR bs3CpuBasic2_swapgs;
139
140extern FNBS3FAR bs3CpuBasic2_iret;
141extern FNBS3FAR bs3CpuBasic2_iret_opsize;
142extern FNBS3FAR bs3CpuBasic2_iret_rexw;
143
144extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c16;
145extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c32;
146extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c64;
147extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c16;
148extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c32;
149extern FNBS3FAR bs3CpuBasic2_sidt_rexw_bx_ud2_c64;
150extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c16;
151extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c32;
152extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c64;
153extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16;
154extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32;
155extern FNBS3FAR bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64;
156
157extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c16;
158extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c32;
159extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c64;
160extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c16;
161extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c32;
162extern FNBS3FAR bs3CpuBasic2_sgdt_rexw_bx_ud2_c64;
163extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c16;
164extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c32;
165extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c64;
166extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16;
167extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32;
168extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64;
169
170extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16;
171extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32;
172extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64;
173extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
174extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
175extern FNBS3FAR bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
176extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16;
177extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt32_es_di__lidt_es_si__ud2_c16;
178extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32;
179extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64;
180extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
181extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
182extern FNBS3FAR bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
183
184extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
185extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
186extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
187extern FNBS3FAR bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
188extern FNBS3FAR bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
189extern FNBS3FAR bs3CpuBasic2_lgdt_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
190extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
191extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
192extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
193extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
194extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
195extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
196
197
198/* bs3-cpu-basic-2-template.mac: */
199FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c16;
200FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c16;
201FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16;
202FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16;
203FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c16;
204FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c16;
205FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c16;
206FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c16;
207FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c16;
208
209FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c32;
210FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c32;
211FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32;
212FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32;
213FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c32;
214FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c32;
215FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c32;
216FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c32;
217FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c32;
218
219FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c64;
220FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c64;
221FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64;
222FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64;
223FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c64;
224FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c64;
225FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c64;
226FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c64;
227FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c64;
228
229
230/*********************************************************************************************************************************
231* Global Variables *
232*********************************************************************************************************************************/
233static const char BS3_FAR *g_pszTestMode = (const char *)1;
234static uint8_t g_bTestMode = 1;
235static bool g_f16BitSys = 1;
236
237
238/** SIDT test workers. */
239static BS3CB2SIDTSGDT const g_aSidtWorkers[] =
240{
241 { "sidt [bx]", bs3CpuBasic2_sidt_bx_ud2_c16, 3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
242 { "sidt [ss:bx]", bs3CpuBasic2_sidt_ss_bx_ud2_c16, 4, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
243 { "o32 sidt [bx]", bs3CpuBasic2_sidt_opsize_bx_ud2_c16, 4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
244 { "o32 sidt [ss:bx]", bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16, 5, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
245 { "sidt [ebx]", bs3CpuBasic2_sidt_bx_ud2_c32, 3, false, BS3_MODE_CODE_32, 0 },
246 { "sidt [ss:ebx]", bs3CpuBasic2_sidt_ss_bx_ud2_c32, 4, true, BS3_MODE_CODE_32, 0 },
247 { "o16 sidt [ebx]", bs3CpuBasic2_sidt_opsize_bx_ud2_c32, 4, false, BS3_MODE_CODE_32, 0 },
248 { "o16 sidt [ss:ebx]", bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32, 5, true, BS3_MODE_CODE_32, 0 },
249 { "sidt [rbx]", bs3CpuBasic2_sidt_bx_ud2_c64, 3, false, BS3_MODE_CODE_64, 0 },
250 { "o64 sidt [rbx]", bs3CpuBasic2_sidt_rexw_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
251 { "o32 sidt [rbx]", bs3CpuBasic2_sidt_opsize_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
252 { "o32 o64 sidt [rbx]", bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64, 0 },
253};
254
255/** SGDT test workers. */
256static BS3CB2SIDTSGDT const g_aSgdtWorkers[] =
257{
258 { "sgdt [bx]", bs3CpuBasic2_sgdt_bx_ud2_c16, 3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
259 { "sgdt [ss:bx]", bs3CpuBasic2_sgdt_ss_bx_ud2_c16, 4, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
260 { "o32 sgdt [bx]", bs3CpuBasic2_sgdt_opsize_bx_ud2_c16, 4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
261 { "o32 sgdt [ss:bx]", bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16, 5, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
262 { "sgdt [ebx]", bs3CpuBasic2_sgdt_bx_ud2_c32, 3, false, BS3_MODE_CODE_32, 0 },
263 { "sgdt [ss:ebx]", bs3CpuBasic2_sgdt_ss_bx_ud2_c32, 4, true, BS3_MODE_CODE_32, 0 },
264 { "o16 sgdt [ebx]", bs3CpuBasic2_sgdt_opsize_bx_ud2_c32, 4, false, BS3_MODE_CODE_32, 0 },
265 { "o16 sgdt [ss:ebx]", bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32, 5, true, BS3_MODE_CODE_32, 0 },
266 { "sgdt [rbx]", bs3CpuBasic2_sgdt_bx_ud2_c64, 3, false, BS3_MODE_CODE_64, 0 },
267 { "o64 sgdt [rbx]", bs3CpuBasic2_sgdt_rexw_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
268 { "o32 sgdt [rbx]", bs3CpuBasic2_sgdt_opsize_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
269 { "o32 o64 sgdt [rbx]", bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64, 0 },
270};
271
272/** LIDT test workers. */
273static BS3CB2SIDTSGDT const g_aLidtWorkers[] =
274{
275 { "lidt [bx]", bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16, 11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
276 { "lidt [ss:bx]", bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16, 12, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
277 { "o32 lidt [bx]", bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16, 12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
278 { "o32 lidt [bx]; sidt32", bs3CpuBasic2_lidt_opsize_bx__sidt32_es_di__lidt_es_si__ud2_c16, 27, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
279 { "o32 lidt [ss:bx]", bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16, 13, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
280 { "lidt [ebx]", bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32, 11, false, BS3_MODE_CODE_32, 0 },
281 { "lidt [ss:ebx]", bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32, 12, true, BS3_MODE_CODE_32, 0 },
282 { "o16 lidt [ebx]", bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32, 12, false, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
283 { "o16 lidt [ss:ebx]", bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32, 13, true, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
284 { "lidt [rbx]", bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64, 9, false, BS3_MODE_CODE_64, 0 },
285 { "o64 lidt [rbx]", bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
286 { "o32 lidt [rbx]", bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
287 { "o32 o64 lidt [rbx]", bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64, 11, false, BS3_MODE_CODE_64, 0 },
288};
289
290/** LGDT test workers. */
291static BS3CB2SIDTSGDT const g_aLgdtWorkers[] =
292{
293 { "lgdt [bx]", bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
294 { "lgdt [ss:bx]", bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 12, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
295 { "o32 lgdt [bx]", bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
296 { "o32 lgdt [ss:bx]", bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 13, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
297 { "lgdt [ebx]", bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 11, false, BS3_MODE_CODE_32, 0 },
298 { "lgdt [ss:ebx]", bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 12, true, BS3_MODE_CODE_32, 0 },
299 { "o16 lgdt [ebx]", bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 12, false, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
300 { "o16 lgdt [ss:ebx]", bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 13, true, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
301 { "lgdt [rbx]", bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 9, false, BS3_MODE_CODE_64, 0 },
302 { "o64 lgdt [rbx]", bs3CpuBasic2_lgdt_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
303 { "o32 lgdt [rbx]", bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
304 { "o32 o64 lgdt [rbx]", bs3CpuBasic2_lgdt_opsize_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 11, false, BS3_MODE_CODE_64, 0 },
305};
306
307
308
309#if 0
310/** Table containing invalid CS selector types. */
311static const BS3CB2INVLDESCTYPE g_aInvalidCsTypes[] =
312{
313 { X86_SEL_TYPE_RO, 1 },
314 { X86_SEL_TYPE_RO_ACC, 1 },
315 { X86_SEL_TYPE_RW, 1 },
316 { X86_SEL_TYPE_RW_ACC, 1 },
317 { X86_SEL_TYPE_RO_DOWN, 1 },
318 { X86_SEL_TYPE_RO_DOWN_ACC, 1 },
319 { X86_SEL_TYPE_RW_DOWN, 1 },
320 { X86_SEL_TYPE_RW_DOWN_ACC, 1 },
321 { 0, 0 },
322 { 1, 0 },
323 { 2, 0 },
324 { 3, 0 },
325 { 4, 0 },
326 { 5, 0 },
327 { 6, 0 },
328 { 7, 0 },
329 { 8, 0 },
330 { 9, 0 },
331 { 10, 0 },
332 { 11, 0 },
333 { 12, 0 },
334 { 13, 0 },
335 { 14, 0 },
336 { 15, 0 },
337};
338
339/** Table containing invalid SS selector types. */
340static const BS3CB2INVLDESCTYPE g_aInvalidSsTypes[] =
341{
342 { X86_SEL_TYPE_EO, 1 },
343 { X86_SEL_TYPE_EO_ACC, 1 },
344 { X86_SEL_TYPE_ER, 1 },
345 { X86_SEL_TYPE_ER_ACC, 1 },
346 { X86_SEL_TYPE_EO_CONF, 1 },
347 { X86_SEL_TYPE_EO_CONF_ACC, 1 },
348 { X86_SEL_TYPE_ER_CONF, 1 },
349 { X86_SEL_TYPE_ER_CONF_ACC, 1 },
350 { 0, 0 },
351 { 1, 0 },
352 { 2, 0 },
353 { 3, 0 },
354 { 4, 0 },
355 { 5, 0 },
356 { 6, 0 },
357 { 7, 0 },
358 { 8, 0 },
359 { 9, 0 },
360 { 10, 0 },
361 { 11, 0 },
362 { 12, 0 },
363 { 13, 0 },
364 { 14, 0 },
365 { 15, 0 },
366};
367#endif
368
369
370static const FNBS3CPUBASIC2ACTSTCODE g_aCmn16[] =
371{
372 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c16, MYOP_LD, 2, 2 },
373 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c16, MYOP_ST, 2, 2 },
374 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16, MYOP_LD_ST, 2, 2 },
375 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16, MYOP_LD_ST | MYOP_EFL, 2, 2 },
376 { bs3CpuBasic2_div_ds_bx__ud2_c16, MYOP_LD_DIV, 2, 2 },
377 { bs3CpuBasic2_fninit_fld_ds_bx__ud2_c16, MYOP_LD, 10, 8, 2 /*fninit*/ },
378 { bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c16, MYOP_LD, 10, 8, 2 /*fninit*/ },
379 { bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c16, MYOP_ST, 10, 8, 4 /*fninit+fldz*/ },
380 { bs3CpuBasic2_fxsave_ds_bx__ud2_c16, MYOP_ST | MYOP_AC_GP, 512, 16 },
381};
382
383static const FNBS3CPUBASIC2ACTSTCODE g_aCmn32[] =
384{
385 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c32, MYOP_LD, 4, 4 },
386 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c32, MYOP_ST, 4, 4 },
387 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32, MYOP_LD_ST, 4, 4 },
388 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32, MYOP_LD_ST | MYOP_EFL, 4, 4 },
389 { bs3CpuBasic2_div_ds_bx__ud2_c32, MYOP_LD_DIV, 4, 4 },
390 { bs3CpuBasic2_fninit_fld_ds_bx__ud2_c32, MYOP_LD, 10, 8, 2 /*fninit*/ },
391 { bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c32, MYOP_LD, 10, 8, 2 /*fninit*/ },
392 { bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c32, MYOP_ST, 10, 8, 4 /*fninit+fldz*/ },
393 { bs3CpuBasic2_fxsave_ds_bx__ud2_c32, MYOP_ST | MYOP_AC_GP, 512, 16 },
394};
395
396static const FNBS3CPUBASIC2ACTSTCODE g_aCmn64[] =
397{
398 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c64, MYOP_LD, 8, 8 },
399 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c64, MYOP_ST, 8, 8 },
400 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64, MYOP_LD_ST, 8, 8 },
401 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64, MYOP_LD_ST | MYOP_EFL, 8, 8 },
402 { bs3CpuBasic2_div_ds_bx__ud2_c64, MYOP_LD_DIV, 8, 8 },
403 { bs3CpuBasic2_fninit_fld_ds_bx__ud2_c64, MYOP_LD, 10, 8, 2 /*fninit*/ },
404 { bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c64, MYOP_LD, 10, 8, 2 /*fninit*/ },
405 { bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c64, MYOP_ST, 10, 8, 4 /*fninit+fldz*/ },
406 { bs3CpuBasic2_fxsave_ds_bx__ud2_c64, MYOP_ST | MYOP_AC_GP, 512, 16 },
407};
408
409static const BS3CPUBASIC2PFTTSTCMNMODE g_aCmnModes[] =
410{
411 { BS3_MODE_CODE_16, RT_ELEMENTS(g_aCmn16), g_aCmn16 },
412 { BS3_MODE_CODE_V86, RT_ELEMENTS(g_aCmn16), g_aCmn16 },
413 { BS3_MODE_CODE_32, RT_ELEMENTS(g_aCmn32), g_aCmn32 },
414 { BS3_MODE_CODE_64, RT_ELEMENTS(g_aCmn64), g_aCmn64 },
415};
416
417
418/**
419 * Sets globals according to the mode.
420 *
421 * @param bTestMode The test mode.
422 */
423static void bs3CpuBasic2_SetGlobals(uint8_t bTestMode)
424{
425 g_bTestMode = bTestMode;
426 g_pszTestMode = Bs3GetModeName(bTestMode);
427 g_f16BitSys = BS3_MODE_IS_16BIT_SYS(bTestMode);
428 g_usBs3TestStep = 0;
429}
430
431
432uint32_t ASMGetESP(void);
433#pragma aux ASMGetESP = \
434 ".386" \
435 "mov ax, sp" \
436 "mov edx, esp" \
437 "shr edx, 16" \
438 value [ax dx] \
439 modify exact [ax dx];
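/*
 * Annotation (not part of the original file): the Watcom "#pragma aux" above
 * supplies a 16-bit implementation of ASMGetESP(): it copies SP into AX and
 * the high word of ESP into DX, and the compiler reassembles the AX/DX pair
 * into the uint32_t return value. A minimal caller sketch:
 *     uint32_t const uEsp = ASMGetESP();
 */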
440
441
442/**
443 * Wrapper around Bs3TestFailedF that prefixes the error with g_usBs3TestStep
444 * and g_pszTestMode.
445 */
446static void bs3CpuBasic2_FailedF(const char *pszFormat, ...)
447{
448 va_list va;
449
450 char szTmp[168];
451 va_start(va, pszFormat);
452 Bs3StrPrintfV(szTmp, sizeof(szTmp), pszFormat, va);
453 va_end(va);
454
455 Bs3TestFailedF("%u - %s: %s", g_usBs3TestStep, g_pszTestMode, szTmp);
456}
457
458
459#if 0
460/**
461 * Compares trap stuff.
462 */
463static void bs3CpuBasic2_CompareIntCtx1(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t bXcpt)
464{
465 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
466 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
467 CHECK_MEMBER("bErrCd", "%#06RX64", pTrapCtx->uErrCd, 0);
468 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, 2 /*int xx*/, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
469 if (Bs3TestSubErrorCount() != cErrorsBefore)
470 {
471 Bs3TrapPrintFrame(pTrapCtx);
472#if 1
473 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
474 Bs3TestPrintf("Halting in CompareTrapCtx1: bXcpt=%#x\n", bXcpt);
475 ASMHalt();
476#endif
477 }
478}
479#endif
480
481
482#if 0
483/**
484 * Compares trap stuff.
485 */
486static void bs3CpuBasic2_CompareTrapCtx2(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t cbIpAdjust,
487 uint8_t bXcpt, uint16_t uHandlerCs)
488{
489 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
490 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
491 CHECK_MEMBER("bErrCd", "%#06RX64", pTrapCtx->uErrCd, 0);
492 CHECK_MEMBER("uHandlerCs", "%#06x", pTrapCtx->uHandlerCs, uHandlerCs);
493 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbIpAdjust, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
494 if (Bs3TestSubErrorCount() != cErrorsBefore)
495 {
496 Bs3TrapPrintFrame(pTrapCtx);
497#if 1
498 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
499 Bs3TestPrintf("Halting in CompareTrapCtx2: bXcpt=%#x\n", bXcpt);
500 ASMHalt();
501#endif
502 }
503}
504#endif
505
506/**
507 * Compares a CPU trap.
508 */
509static void bs3CpuBasic2_CompareCpuTrapCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd,
510 uint8_t bXcpt, bool f486ResumeFlagHint, uint8_t cbIpAdjust)
511{
512 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
513 uint32_t fExtraEfl;
514
515 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
516 CHECK_MEMBER("bErrCd", "%#06RX16", (uint16_t)pTrapCtx->uErrCd, (uint16_t)uErrCd); /* 486 only writes a word */
517
518 if ( g_f16BitSys
519 || bXcpt == X86_XCPT_DB /* hack (10980xe)... */
520 || ( !f486ResumeFlagHint
521 && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) <= BS3CPU_80486 ) )
522 fExtraEfl = 0;
523 else
524 fExtraEfl = X86_EFL_RF;
525#if 0 /** @todo Running on an AMD Phenom II X6 1100T under AMD-V I'm not getting good X86_EFL_RF results. Enable this to get on with other work. */
526 fExtraEfl = pTrapCtx->Ctx.rflags.u32 & X86_EFL_RF;
527#endif
528 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbIpAdjust, 0 /*cbSpAdjust*/, fExtraEfl, g_pszTestMode, g_usBs3TestStep);
529 if (Bs3TestSubErrorCount() != cErrorsBefore)
530 {
531 Bs3TrapPrintFrame(pTrapCtx);
532#if 1
533 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
534 Bs3TestPrintf("Halting: bXcpt=%#x uErrCd=%#x\n", bXcpt, uErrCd);
535 ASMHalt();
536#endif
537 }
538}
539
540
541/**
542 * Compares \#GP trap.
543 */
544static void bs3CpuBasic2_CompareGpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
545{
546 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_GP, true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
547}
548
549#if 0
550/**
551 * Compares \#NP trap.
552 */
553static void bs3CpuBasic2_CompareNpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
554{
555 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_NP, true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
556}
557#endif
558
559/**
560 * Compares \#SS trap.
561 */
562static void bs3CpuBasic2_CompareSsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd, bool f486ResumeFlagHint)
563{
564 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_SS, f486ResumeFlagHint, 0 /*cbIpAdjust*/);
565}
566
567#if 0
568/**
569 * Compares \#TS trap.
570 */
571static void bs3CpuBasic2_CompareTsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
572{
573 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_TS, false /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
574}
575#endif
576
577/**
578 * Compares \#PF trap.
579 */
580static void bs3CpuBasic2_ComparePfCtx(PCBS3TRAPFRAME pTrapCtx, PBS3REGCTX pStartCtx, uint16_t uErrCd,
581 uint64_t uCr2Expected, uint8_t cbIpAdjust)
582{
583 uint64_t const uCr2Saved = pStartCtx->cr2.u;
584 pStartCtx->cr2.u = uCr2Expected;
585 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_PF, true /*f486ResumeFlagHint*/, cbIpAdjust);
586 pStartCtx->cr2.u = uCr2Saved;
587}
588
589/**
590 * Compares \#UD trap.
591 */
592static void bs3CpuBasic2_CompareUdCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx)
593{
594 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*no error code*/, X86_XCPT_UD,
595 true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
596}
597
598/**
599 * Compares \#AC trap.
600 */
601static void bs3CpuBasic2_CompareAcCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t cbIpAdjust)
602{
603 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*always zero*/, X86_XCPT_AC, true /*f486ResumeFlagHint*/, cbIpAdjust);
604}
605
606/**
607 * Compares \#DB trap.
608 */
609static void bs3CpuBasic2_CompareDbCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint32_t fDr6Expect)
610{
611 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
612 uint32_t const fDr6 = Bs3RegGetDr6();
613 fDr6Expect |= X86_DR6_RA1_MASK;
614 CHECK_MEMBER("dr6", "%#08RX32", fDr6, fDr6Expect);
615
616 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*always zero*/, X86_XCPT_DB, false /*f486ResumeFlagHint?*/, 0 /*cbIpAdjust*/);
617
618 if (Bs3TestSubErrorCount() > cErrorsBefore)
619 {
620#if 0
621 Bs3TestPrintf("Halting\n");
622 ASMHalt();
623#endif
624 }
625}
626
627
628/**
629 * Checks that DR6 has the initial value, i.e. is unchanged when another exception
630 * was raised before a \#DB could occur.
631 */
632static void bs3CpuBasic2_CheckDr6InitVal(void)
633{
634 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
635 uint32_t const fDr6 = Bs3RegGetDr6();
636 uint32_t const fDr6Expect = X86_DR6_INIT_VAL;
637 CHECK_MEMBER("dr6", "%#08RX32", fDr6, fDr6Expect);
638 if (Bs3TestSubErrorCount() > cErrorsBefore)
639 {
640 Bs3TestPrintf("Halting\n");
641 ASMHalt();
642 }
643}
644
645#if 0 /* convert me */
646static void bs3CpuBasic2_RaiseXcpt1Common(uint16_t const uSysR0Cs, uint16_t const uSysR0CsConf, uint16_t const uSysR0Ss,
647 PX86DESC const paIdt, unsigned const cIdteShift)
648{
649 BS3TRAPFRAME TrapCtx;
650 BS3REGCTX Ctx80;
651 BS3REGCTX Ctx81;
652 BS3REGCTX Ctx82;
653 BS3REGCTX Ctx83;
654 BS3REGCTX CtxTmp;
655 BS3REGCTX CtxTmp2;
656 PBS3REGCTX apCtx8x[4];
657 unsigned iCtx;
658 unsigned iRing;
659 unsigned iDpl;
660 unsigned iRpl;
661 unsigned i, j, k;
662 uint32_t uExpected;
663 bool const f486Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486;
664# if TMPL_BITS == 16
665 bool const f386Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386;
666 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
667# else
668 bool const f286 = false;
669 bool const f386Plus = true;
670 int rc;
671 uint8_t *pbIdtCopyAlloc;
672 PX86DESC pIdtCopy;
673 const unsigned cbIdte = 1 << (3 + cIdteShift);
674 RTCCUINTXREG uCr0Saved = ASMGetCR0();
675 RTGDTR GdtrSaved;
676# endif
677 RTIDTR IdtrSaved;
678 RTIDTR Idtr;
679
680 ASMGetIDTR(&IdtrSaved);
681# if TMPL_BITS != 16
682 ASMGetGDTR(&GdtrSaved);
683# endif
684
685 /* make sure they're allocated */
686 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
687 Bs3MemZero(&Ctx80, sizeof(Ctx80));
688 Bs3MemZero(&Ctx81, sizeof(Ctx81));
689 Bs3MemZero(&Ctx82, sizeof(Ctx82));
690 Bs3MemZero(&Ctx83, sizeof(Ctx83));
691 Bs3MemZero(&CtxTmp, sizeof(CtxTmp));
692 Bs3MemZero(&CtxTmp2, sizeof(CtxTmp2));
693
694 /* Context array. */
695 apCtx8x[0] = &Ctx80;
696 apCtx8x[1] = &Ctx81;
697 apCtx8x[2] = &Ctx82;
698 apCtx8x[3] = &Ctx83;
699
700# if TMPL_BITS != 16
701 /* Allocate memory for playing around with the IDT. */
702 pbIdtCopyAlloc = NULL;
703 if (BS3_MODE_IS_PAGED(g_bTestMode))
704 pbIdtCopyAlloc = Bs3MemAlloc(BS3MEMKIND_FLAT32, 12*_1K);
705# endif
706
707 /*
708 * IDT entry 80 thru 83 are assigned DPLs according to the number.
709 * (We'll be using more, but this'll do for now.)
710 */
711 paIdt[0x80 << cIdteShift].Gate.u2Dpl = 0;
712 paIdt[0x81 << cIdteShift].Gate.u2Dpl = 1;
713 paIdt[0x82 << cIdteShift].Gate.u2Dpl = 2;
714 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
715
716 Bs3RegCtxSave(&Ctx80);
717 Ctx80.rsp.u -= 0x300;
718 Ctx80.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int80);
719# if TMPL_BITS == 16
720 Ctx80.cs = BS3_MODE_IS_RM_OR_V86(g_bTestMode) ? BS3_SEL_TEXT16 : BS3_SEL_R0_CS16;
721# elif TMPL_BITS == 32
722 g_uBs3TrapEipHint = Ctx80.rip.u32;
723# endif
724 Bs3MemCpy(&Ctx81, &Ctx80, sizeof(Ctx80));
725 Ctx81.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int81);
726 Bs3MemCpy(&Ctx82, &Ctx80, sizeof(Ctx80));
727 Ctx82.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int82);
728 Bs3MemCpy(&Ctx83, &Ctx80, sizeof(Ctx80));
729 Ctx83.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int83);
730
731 /*
732 * Check that all the above gates work from ring-0.
733 */
734 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
735 {
736 g_usBs3TestStep = iCtx;
737# if TMPL_BITS == 32
738 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
739# endif
740 Bs3TrapSetJmpAndRestore(apCtx8x[iCtx], &TrapCtx);
741 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, apCtx8x[iCtx], 0x80+iCtx /*bXcpt*/);
742 }
743
744 /*
745 * Check that the gate DPL checks works.
746 */
747 g_usBs3TestStep = 100;
748 for (iRing = 0; iRing <= 3; iRing++)
749 {
750 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
751 {
752 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
753 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
754# if TMPL_BITS == 32
755 g_uBs3TrapEipHint = CtxTmp.rip.u32;
756# endif
757 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
758 if (iCtx < iRing)
759 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
760 else
761 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
762 g_usBs3TestStep++;
763 }
764 }
765
766 /*
767 * Modify the gate CS value and run the handler at a different CPL.
768 * Throw RPL variations into the mix (completely ignored) together
769 * with gate presence.
770 * 1. CPL <= GATE.DPL
771 * 2. GATE.P
772 * 3. GATE.CS.DPL <= CPL (non-conforming segments)
773 */
774 g_usBs3TestStep = 1000;
775 for (i = 0; i <= 3; i++)
776 {
777 for (iRing = 0; iRing <= 3; iRing++)
778 {
779 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
780 {
781# if TMPL_BITS == 32
782 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
783# endif
784 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
785 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
786
787 for (j = 0; j <= 3; j++)
788 {
789 uint16_t const uCs = (uSysR0Cs | j) + (i << BS3_SEL_RING_SHIFT);
790 for (k = 0; k < 2; k++)
791 {
792 g_usBs3TestStep++;
793 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
794 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
795 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = k;
796 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
797 /*Bs3TrapPrintFrame(&TrapCtx);*/
798 if (iCtx < iRing)
799 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
800 else if (k == 0)
801 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
802 else if (i > iRing)
803 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
804 else
805 {
806 uint16_t uExpectedCs = uCs & X86_SEL_MASK_OFF_RPL;
807 if (i <= iCtx && i <= iRing)
808 uExpectedCs |= i;
809 bs3CpuBasic2_CompareTrapCtx2(&TrapCtx, &CtxTmp, 2 /*int 8xh*/, 0x80 + iCtx /*bXcpt*/, uExpectedCs);
810 }
811 }
812 }
813
814 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
815 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
816 }
817 }
818 }
819 BS3_ASSERT(g_usBs3TestStep < 1600);
820
821 /*
822 * Various CS and SS related faults
823 *
824 * We temporarily reconfigure gates 80 and 83 with new CS selectors, the
825 * latter having a CS.DPL of 2 for testing ring transitions and SS loading
826 * without making it impossible to handle faults.
827 */
828 g_usBs3TestStep = 1600;
829 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
830 Bs3GdteTestPage00.Gen.u1Present = 0;
831 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
832 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
833
834 /* CS.PRESENT = 0 */
835 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
836 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
837 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
838 bs3CpuBasic2_FailedF("selector was accessed");
839 g_usBs3TestStep++;
840
841 /* Check that GATE.DPL is checked before CS.PRESENT. */
842 for (iRing = 1; iRing < 4; iRing++)
843 {
844 Bs3MemCpy(&CtxTmp, &Ctx80, sizeof(CtxTmp));
845 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
846 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
847 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x80 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
848 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
849 bs3CpuBasic2_FailedF("selector was accessed");
850 g_usBs3TestStep++;
851 }
852
853 /* CS.DPL mismatch takes precedence over CS.PRESENT = 0. */
854 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
855 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
856 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
857 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
858 bs3CpuBasic2_FailedF("CS selector was accessed");
859 g_usBs3TestStep++;
860 for (iDpl = 1; iDpl < 4; iDpl++)
861 {
862 Bs3GdteTestPage00.Gen.u2Dpl = iDpl;
863 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
864 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
865 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
866 bs3CpuBasic2_FailedF("CS selector was accessed");
867 g_usBs3TestStep++;
868 }
869
870 /* 1608: Check all the invalid CS selector types alone. */
871 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
872 for (i = 0; i < RT_ELEMENTS(g_aInvalidCsTypes); i++)
873 {
874 Bs3GdteTestPage00.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
875 Bs3GdteTestPage00.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
876 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
877 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
878 if (Bs3GdteTestPage00.Gen.u4Type != g_aInvalidCsTypes[i].u4Type)
879 bs3CpuBasic2_FailedF("Invalid CS type %#x/%u -> %#x/%u\n",
880 g_aInvalidCsTypes[i].u4Type, g_aInvalidCsTypes[i].u1DescType,
881 Bs3GdteTestPage00.Gen.u4Type, Bs3GdteTestPage00.Gen.u1DescType);
882 g_usBs3TestStep++;
883
884 /* Incorrect CS.TYPE takes precedence over CS.PRESENT = 0. */
885 Bs3GdteTestPage00.Gen.u1Present = 0;
886 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
887 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
888 Bs3GdteTestPage00.Gen.u1Present = 1;
889 g_usBs3TestStep++;
890 }
891
892 /* Fix CS again. */
893 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
894
895 /* 1632: Test SS. */
896 if (!BS3_MODE_IS_64BIT_SYS(g_bTestMode))
897 {
898 uint16_t BS3_FAR *puTssSs2 = BS3_MODE_IS_16BIT_SYS(g_bTestMode) ? &Bs3Tss16.ss2 : &Bs3Tss32.ss2;
899 uint16_t const uSavedSs2 = *puTssSs2;
900 X86DESC const SavedGate83 = paIdt[0x83 << cIdteShift];
901
902 /* Make the handler execute in ring-2. */
903 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
904 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
905 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_02 | 2;
906
907 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
908 Bs3RegCtxConvertToRingX(&CtxTmp, 3); /* yeah, from 3 so SS:xSP is reloaded. */
909 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
910 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
911 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
912 bs3CpuBasic2_FailedF("CS selector was not accessed");
913 g_usBs3TestStep++;
914
915 /* Create a SS.DPL=2 stack segment and check that SS2.RPL matters and
916 that we get #SS if the selector isn't present. */
917 i = 0; /* used for cycling thru invalid CS types */
918 for (k = 0; k < 10; k++)
919 {
920 /* k=0: present,
921 k=1: not-present,
922 k=2: present but very low limit,
923 k=3: not-present, low limit.
924 k=4: present, read-only.
925 k=5: not-present, read-only.
926 k=6: present, code-selector.
927 k=7: not-present, code-selector.
928 k=8: present, read-write / no access + system (=LDT).
929 k=9: not-present, read-write / no access + system (=LDT).
930 */
931 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
932 Bs3GdteTestPage03.Gen.u1Present = !(k & 1);
933 if (k >= 8)
934 {
935 Bs3GdteTestPage03.Gen.u1DescType = 0; /* system */
936 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW; /* = LDT */
937 }
938 else if (k >= 6)
939 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_ER;
940 else if (k >= 4)
941 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RO;
942 else if (k >= 2)
943 {
944 Bs3GdteTestPage03.Gen.u16LimitLow = 0x400;
945 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
946 Bs3GdteTestPage03.Gen.u1Granularity = 0;
947 }
948
949 for (iDpl = 0; iDpl < 4; iDpl++)
950 {
951 Bs3GdteTestPage03.Gen.u2Dpl = iDpl;
952
953 for (iRpl = 0; iRpl < 4; iRpl++)
954 {
955 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | iRpl;
956 //Bs3TestPrintf("k=%u iDpl=%u iRpl=%u step=%u\n", k, iDpl, iRpl, g_usBs3TestStep);
957 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
958 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
959 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
960 if (iRpl != 2 || iRpl != iDpl || k >= 4)
961 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
962 else if (k != 0)
963 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03,
964 k == 2 /*f486ResumeFlagHint*/);
965 else
966 {
967 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
968 if (TrapCtx.uHandlerSs != (BS3_SEL_TEST_PAGE_03 | 2))
969 bs3CpuBasic2_FailedF("uHandlerSs=%#x expected %#x\n", TrapCtx.uHandlerSs, BS3_SEL_TEST_PAGE_03 | 2);
970 }
971 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
972 bs3CpuBasic2_FailedF("CS selector was not accessed");
973 if ( TrapCtx.bXcpt == 0x83
974 || (TrapCtx.bXcpt == X86_XCPT_SS && k == 2) )
975 {
976 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
977 bs3CpuBasic2_FailedF("SS selector was not accessed");
978 }
979 else if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
980 bs3CpuBasic2_FailedF("SS selector was accessed");
981 g_usBs3TestStep++;
982
983 /* +1: Modify the gate DPL to check that this is checked before SS.DPL and SS.PRESENT. */
984 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 2;
985 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
986 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x83 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
987 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
988 g_usBs3TestStep++;
989
990 /* +2: Check the CS.DPL check is done before the SS ones. Restoring the
991 ring-0 INT 83 context triggers the CS.DPL < CPL check. */
992 Bs3TrapSetJmpAndRestore(&Ctx83, &TrapCtx);
993 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx83, BS3_SEL_TEST_PAGE_02);
994 g_usBs3TestStep++;
995
996 /* +3: Now mark the CS selector not present and check that that also triggers before SS stuff. */
997 Bs3GdteTestPage02.Gen.u1Present = 0;
998 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
999 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
1000 Bs3GdteTestPage02.Gen.u1Present = 1;
1001 g_usBs3TestStep++;
1002
1003 /* +4: Make the CS selector some invalid type and check it triggers before SS stuff. */
1004 Bs3GdteTestPage02.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
1005 Bs3GdteTestPage02.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
1006 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1007 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
1008 Bs3GdteTestPage02.Gen.u4Type = X86_SEL_TYPE_ER_ACC;
1009 Bs3GdteTestPage02.Gen.u1DescType = 1;
1010 g_usBs3TestStep++;
1011
1012 /* +5: Now, make the CS selector limit too small and check that it triggers after the SS trouble.
1013 The 286 had a simpler approach to these GP(0). */
1014 Bs3GdteTestPage02.Gen.u16LimitLow = 0;
1015 Bs3GdteTestPage02.Gen.u4LimitHigh = 0;
1016 Bs3GdteTestPage02.Gen.u1Granularity = 0;
1017 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1018 if (f286)
1019 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
1020 else if (iRpl != 2 || iRpl != iDpl || k >= 4)
1021 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
1022 else if (k != 0)
1023 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, k == 2 /*f486ResumeFlagHint*/);
1024 else
1025 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
1026 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1027 g_usBs3TestStep++;
1028 }
1029 }
1030 }
1031
1032 /* Check all the invalid SS selector types alone. */
1033 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1034 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1035 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
1036 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1037 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
1038 g_usBs3TestStep++;
1039 for (i = 0; i < RT_ELEMENTS(g_aInvalidSsTypes); i++)
1040 {
1041 Bs3GdteTestPage03.Gen.u4Type = g_aInvalidSsTypes[i].u4Type;
1042 Bs3GdteTestPage03.Gen.u1DescType = g_aInvalidSsTypes[i].u1DescType;
1043 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1044 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
1045 if (Bs3GdteTestPage03.Gen.u4Type != g_aInvalidSsTypes[i].u4Type)
1046 bs3CpuBasic2_FailedF("Invalid SS type %#x/%u -> %#x/%u\n",
1047 g_aInvalidSsTypes[i].u4Type, g_aInvalidSsTypes[i].u1DescType,
1048 Bs3GdteTestPage03.Gen.u4Type, Bs3GdteTestPage03.Gen.u1DescType);
1049 g_usBs3TestStep++;
1050 }
1051
1052 /*
1053 * Continue the SS experiments with an expand-down segment. We'll use
1054 * the same setup as we already have with gate 83h being DPL and
1055 * having CS.DPL=2.
1056 *
1057 * Expand down segments are weird. The valid area is practically speaking
1058 * reversed. So, a 16-bit segment with a limit of 0x6000 will have valid
1059 * addresses from 0xffff thru 0x6001.
1060 *
1061 * So, with expand down segments we can more easily cut partially into the
1062 * pushing of the iret frame and trigger more interesting behavior than
1063 * with regular "expand up" segments where the whole pushing area is either
1064 * all fine or not fine.
1065 */
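/*
 * Annotation (not part of the original file): a small worked illustration of
 * the expand-down rule described above. For a 16-bit expand-down data
 * segment, the offsets strictly above the limit are the valid ones:
 *     fValid = (off > uLimit) && (off <= UINT16_C(0xffff));
 * so a limit of 0x6000 gives the valid range 0x6001..0xffff, and the limit
 * of 0 used in the second test below leaves everything but offset 0 valid.
 */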
1066 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1067 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1068 Bs3GdteTestPage03.Gen.u2Dpl = 2;
1069 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW_DOWN;
1070 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
1071
1072 /* First test, limit = max --> no bytes accessible --> #GP */
1073 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1074 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
1075
1076 /* Second test, limit = 0 --> all but the zero byte accessible --> works */
1077 Bs3GdteTestPage03.Gen.u16LimitLow = 0;
1078 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
1079 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1080 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
1081
1082 /* Modify the gate handler to be a dummy that immediately does UD2
1083 and triggers #UD, then advance the limit down till we get the #UD. */
1084 Bs3GdteTestPage03.Gen.u1Granularity = 0;
1085
1086 Bs3MemCpy(&CtxTmp2, &CtxTmp, sizeof(CtxTmp2)); /* #UD result context */
1087 if (g_f16BitSys)
1088 {
1089 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr - BS3_ADDR_BS3TEXT16;
1090 Bs3Trap16SetGate(0x83, X86_SEL_TYPE_SYS_286_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u16, 0 /*cParams*/);
1091 CtxTmp2.rsp.u = Bs3Tss16.sp2 - 2*5;
1092 }
1093 else
1094 {
1095 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr;
1096 Bs3Trap32SetGate(0x83, X86_SEL_TYPE_SYS_386_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u32, 0 /*cParams*/);
1097 CtxTmp2.rsp.u = Bs3Tss32.esp2 - 4*5;
1098 }
1099 CtxTmp2.bMode = g_bTestMode; /* g_bBs3CurrentMode not changed by the UD2 handler. */
1100 CtxTmp2.cs = BS3_SEL_TEST_PAGE_02 | 2;
1101 CtxTmp2.ss = BS3_SEL_TEST_PAGE_03 | 2;
1102 CtxTmp2.bCpl = 2;
1103
1104 /* test run. */
1105 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1106 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1107 g_usBs3TestStep++;
1108
1109 /* Real run. */
1110 i = (g_f16BitSys ? 2 : 4) * 6 + 1;
1111 while (i-- > 0)
1112 {
1113 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
1114 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1115 if (i > 0)
1116 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
1117 else
1118 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1119 g_usBs3TestStep++;
1120 }
1121
1122 /* Do a run where we do the same-ring kind of access. */
1123 Bs3RegCtxConvertToRingX(&CtxTmp, 2);
1124 if (g_f16BitSys)
1125 {
1126 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 2*3;
1127 i = 2*3 - 1;
1128 }
1129 else
1130 {
1131 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 4*3;
1132 i = 4*3 - 1;
1133 }
1134 CtxTmp.ss = BS3_SEL_TEST_PAGE_03 | 2;
1135 CtxTmp2.ds = CtxTmp.ds;
1136 CtxTmp2.es = CtxTmp.es;
1137 CtxTmp2.fs = CtxTmp.fs;
1138 CtxTmp2.gs = CtxTmp.gs;
1139 while (i-- > 0)
1140 {
1141 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
1142 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1143 if (i > 0)
1144 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, 0 /*BS3_SEL_TEST_PAGE_03*/, true /*f486ResumeFlagHint*/);
1145 else
1146 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1147 g_usBs3TestStep++;
1148 }
1149
1150 *puTssSs2 = uSavedSs2;
1151 paIdt[0x83 << cIdteShift] = SavedGate83;
1152 }
1153 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1154 BS3_ASSERT(g_usBs3TestStep < 3000);
1155
1156 /*
1157 * Modify the gate CS value with a conforming segment.
1158 */
1159 g_usBs3TestStep = 3000;
1160 for (i = 0; i <= 3; i++) /* cs.dpl */
1161 {
1162 for (iRing = 0; iRing <= 3; iRing++)
1163 {
1164 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1165 {
1166 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1167 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1168# if TMPL_BITS == 32
1169 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1170# endif
1171
1172 for (j = 0; j <= 3; j++) /* rpl */
1173 {
1174 uint16_t const uCs = (uSysR0CsConf | j) + (i << BS3_SEL_RING_SHIFT);
1175 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1176 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1177 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1178 //Bs3TestPrintf("%u/%u/%u/%u: cs=%04x hcs=%04x xcpt=%02x\n", i, iRing, iCtx, j, uCs, TrapCtx.uHandlerCs, TrapCtx.bXcpt);
1179 /*Bs3TrapPrintFrame(&TrapCtx);*/
1180 g_usBs3TestStep++;
1181 if (iCtx < iRing)
1182 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1183 else if (i > iRing)
1184 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1185 else
1186 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1187 }
1188 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1189 }
1190 }
1191 }
1192 BS3_ASSERT(g_usBs3TestStep < 3500);
1193
1194 /*
1195 * The gates must be 64-bit in long mode.
1196 */
1197 if (cIdteShift != 0)
1198 {
1199 g_usBs3TestStep = 3500;
1200 for (i = 0; i <= 3; i++)
1201 {
1202 for (iRing = 0; iRing <= 3; iRing++)
1203 {
1204 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1205 {
1206 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1207 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1208
1209 for (j = 0; j < 2; j++)
1210 {
1211 static const uint16_t s_auCSes[2] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32 };
1212 uint16_t uCs = (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT);
1213 g_usBs3TestStep++;
1214 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1215 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1216 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1217 /*Bs3TrapPrintFrame(&TrapCtx);*/
1218 if (iCtx < iRing)
1219 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1220 else
1221 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1222 }
1223 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1224 }
1225 }
1226 }
1227 BS3_ASSERT(g_usBs3TestStep < 4000);
1228 }
1229
1230 /*
1231 * IDT limit check. The 286 does not access X86DESCGATE::u16OffsetHigh.
1232 */
1233 g_usBs3TestStep = 5000;
1234 i = (0x80 << (cIdteShift + 3)) - 1;
1235 j = (0x82 << (cIdteShift + 3)) - (!f286 ? 1 : 3);
1236 k = (0x83 << (cIdteShift + 3)) - 1;
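/*
 * Annotation (not part of the original file): with 8-byte IDT entries
 * (cIdteShift = 0) this sweeps IDT limits 0x3ff..0x417. Gate 0x81 occupies
 * bytes 0x408..0x40f, so limits below j truncate the gate and INT 81h is
 * expected to #GP, while limits of j and up cover everything the CPU
 * actually reads (the 286 skips the last word, X86DESCGATE::u16OffsetHigh,
 * hence its smaller j).
 */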
1237 for (; i <= k; i++, g_usBs3TestStep++)
1238 {
1239 Idtr = IdtrSaved;
1240 Idtr.cbIdt = i;
1241 ASMSetIDTR(&Idtr);
1242 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1243 if (i < j)
1244 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx81, (0x81 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1245 else
1246 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1247 }
1248 ASMSetIDTR(&IdtrSaved);
1249 BS3_ASSERT(g_usBs3TestStep < 5100);
1250
1251# if TMPL_BITS != 16 /* Only do the paging related stuff in 32-bit and 64-bit modes. */
1252
1253 /*
1254 * IDT page not present. Placing the IDT copy such that 0x80 is on the
1255 * first page and 0x81 is on the second page. We then proceed to move
1256 * it down byte by byte to check that any inaccessible byte means #PF.
1257 *
1258 * Note! We must reload the alternative IDTR for each run as any kind of
1259 * printing to the screen (like error reporting) will cause a switch
1260 * to real mode and back, reloading the default IDTR.
1261 */
1262 g_usBs3TestStep = 5200;
1263 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1264 {
1265 uint32_t const uCr2Expected = Bs3SelPtrToFlat(pbIdtCopyAlloc) + _4K;
1266 for (j = 0; j < cbIdte; j++)
1267 {
1268 pIdtCopy = (PX86DESC)&pbIdtCopyAlloc[_4K - cbIdte * 0x81 - j];
1269 Bs3MemCpy(pIdtCopy, paIdt, cbIdte * 256);
1270
1271 Idtr.cbIdt = IdtrSaved.cbIdt;
1272 Idtr.pIdt = Bs3SelPtrToFlat(pIdtCopy);
1273
1274 ASMSetIDTR(&Idtr);
1275 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1276 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1277 g_usBs3TestStep++;
1278
1279 ASMSetIDTR(&Idtr);
1280 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1281 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1282 g_usBs3TestStep++;
1283
1284 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1285 if (RT_SUCCESS(rc))
1286 {
1287 ASMSetIDTR(&Idtr);
1288 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1289 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1290 g_usBs3TestStep++;
1291
1292 ASMSetIDTR(&Idtr);
1293 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1294 if (f486Plus)
1295 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1296 else
1297 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1298 g_usBs3TestStep++;
1299
1300 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1301
1302 /* Check that the entry type is checked after the whole IDTE has been cleared for #PF. */
1303 pIdtCopy[0x80 << cIdteShift].Gate.u4Type = 0;
1304 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1305 if (RT_SUCCESS(rc))
1306 {
1307 ASMSetIDTR(&Idtr);
1308 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1309 if (f486Plus)
1310 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1311 else
1312 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1313 g_usBs3TestStep++;
1314
1315 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1316 }
1317 }
1318 else
1319 Bs3TestPrintf("Bs3PagingProtectPtr: %d\n", i);
1320
1321 ASMSetIDTR(&IdtrSaved);
1322 }
1323 }
1324
1325 /*
1326 * The read/write and user/supervisor bits of the IDT PTEs are irrelevant.
1327 */
1328 g_usBs3TestStep = 5300;
1329 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1330 {
1331 Bs3MemCpy(pbIdtCopyAlloc, paIdt, cbIdte * 256);
1332 Idtr.cbIdt = IdtrSaved.cbIdt;
1333 Idtr.pIdt = Bs3SelPtrToFlat(pbIdtCopyAlloc);
1334
1335 ASMSetIDTR(&Idtr);
1336 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1337 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1338 g_usBs3TestStep++;
1339
1340 rc = Bs3PagingProtect(Idtr.pIdt, _4K, 0 /*fSet*/, X86_PTE_RW | X86_PTE_US /*fClear*/);
1341 if (RT_SUCCESS(rc))
1342 {
1343 ASMSetIDTR(&Idtr);
1344 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1345 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1346 g_usBs3TestStep++;
1347
1348 Bs3PagingProtect(Idtr.pIdt, _4K, X86_PTE_RW | X86_PTE_US /*fSet*/, 0 /*fClear*/);
1349 }
1350 ASMSetIDTR(&IdtrSaved);
1351 }
1352
1353 /*
1354 * Check that CS.u1Accessed is set to 1. Use test page selectors #0 and #3 together
1355 * with interrupt gates 80h and 83h, respectively.
1356 */
1357/** @todo Throw in SS.u1Accessed too. */
1358 g_usBs3TestStep = 5400;
1359 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1360 {
1361 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
1362 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1363 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
1364
1365 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Cs + (3 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1366 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1367 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_03; /* rpl is ignored, so leave it as zero. */
1368
1369 /* Check that the CS.A bit is being set on a general basis and that
1370 the special CS values work with our generic handler code. */
1371 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1372 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1373 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1374 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed", Bs3GdteTestPage00.Gen.u4Type);
1375 g_usBs3TestStep++;
1376
1377 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1378 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1379 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1380 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1381 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1382            bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage03.Gen.u4Type);
1383 if (TrapCtx.uHandlerCs != (BS3_SEL_TEST_PAGE_03 | 3))
1384 bs3CpuBasic2_FailedF("uHandlerCs=%#x, expected %#x", TrapCtx.uHandlerCs, (BS3_SEL_TEST_PAGE_03 | 3));
1385 g_usBs3TestStep++;
1386
1387 /*
1388         * Now check that setting CS.u1Accessed to 1 does __NOT__ trigger a page
1389         * fault due to the RW bit being zero.
1390         * (We check both with and without the WP bit on 80486 and later.)
1391 */
1392 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1393 ASMSetCR0(uCr0Saved | X86_CR0_WP);
1394
1395 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1396 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1397 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_RW /*fClear*/);
1398 if (RT_SUCCESS(rc))
1399 {
1400 /* ring-0 handler */
1401 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1402 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1403 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1404 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1405 g_usBs3TestStep++;
1406
1407 /* ring-3 handler */
1408 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1409 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1410 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1411 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1412 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1413                bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage03.Gen.u4Type);
1414 g_usBs3TestStep++;
1415
1416 /* clear WP and repeat the above. */
1417 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1418 ASMSetCR0(uCr0Saved & ~X86_CR0_WP);
1419 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1420 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1421
1422 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1423 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1424 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1425 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1426 g_usBs3TestStep++;
1427
1428 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1429 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1430 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1431                bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage03.Gen.u4Type);
1432 g_usBs3TestStep++;
1433
1434 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_RW /*fSet*/, 0 /*fClear*/);
1435 }
1436
1437 ASMSetCR0(uCr0Saved);
1438
1439 /*
1440         * While we're here, check that if the CS GDT entry is in a non-present
1441         * page we do get a #PF with the right error code and CR2.
1442 */
1443 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* Just for fun, really a pointless gesture. */
1444 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1445 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1446 if (RT_SUCCESS(rc))
1447 {
1448 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1449 if (f486Plus)
1450 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00);
1451 else
1452 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00 + 4);
1453 g_usBs3TestStep++;
1454
1455            /* Do it from ring-3 to check ErrCd, which, it turns out, doesn't set X86_TRAP_PF_US. */
1456 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1457 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1458 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1459
1460 if (f486Plus)
1461 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03);
1462 else
1463 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03 + 4);
1464 g_usBs3TestStep++;
1465
1466 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1467 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1468 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #1", Bs3GdteTestPage00.Gen.u4Type);
1469 if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1470 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #2", Bs3GdteTestPage03.Gen.u4Type);
1471 }
1472
1473 /* restore */
1474 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1475 paIdt[0x83 << cIdteShift].Gate.u16Sel = uSysR0Cs;// + (3 << BS3_SEL_RING_SHIFT) + 3;
1476 }
1477
1478# endif /* 32 || 64*/
1479
1480 /*
1481 * Check broad EFLAGS effects.
1482 */
1483 g_usBs3TestStep = 5600;
1484 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1485 {
1486 for (iRing = 0; iRing < 4; iRing++)
1487 {
1488 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1489 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1490
1491 /* all set */
1492 CtxTmp.rflags.u32 &= X86_EFL_VM | X86_EFL_1;
1493 CtxTmp.rflags.u32 |= X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF /* | X86_EFL_TF */ /*| X86_EFL_IF*/
1494 | X86_EFL_DF | X86_EFL_OF | X86_EFL_IOPL /* | X86_EFL_NT*/;
1495 if (f486Plus)
1496 CtxTmp.rflags.u32 |= X86_EFL_AC;
1497 if (f486Plus && !g_f16BitSys)
1498 CtxTmp.rflags.u32 |= X86_EFL_RF;
1499 if (g_uBs3CpuDetected & BS3CPU_F_CPUID)
1500 CtxTmp.rflags.u32 |= X86_EFL_VIF | X86_EFL_VIP;
1501 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1502 CtxTmp.rflags.u32 &= ~X86_EFL_RF;
1503
1504 if (iCtx >= iRing)
1505 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1506 else
1507 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1508 uExpected = CtxTmp.rflags.u32
1509 & ( X86_EFL_1 | X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_DF
1510 | X86_EFL_OF | X86_EFL_IOPL | X86_EFL_NT | X86_EFL_VM | X86_EFL_AC | X86_EFL_VIF | X86_EFL_VIP
1511 | X86_EFL_ID /*| X86_EFL_TF*/ /*| X86_EFL_IF*/ /*| X86_EFL_RF*/ );
1512 if (TrapCtx.fHandlerRfl != uExpected)
1513 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1514 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1515 g_usBs3TestStep++;
1516
1517 /* all cleared */
1518 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80286)
1519 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_RA1_MASK | UINT16_C(0xf000));
1520 else
1521 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_VM | X86_EFL_RA1_MASK);
1522 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1523 if (iCtx >= iRing)
1524 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1525 else
1526 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1527 uExpected = CtxTmp.rflags.u32;
1528 if (TrapCtx.fHandlerRfl != uExpected)
1529 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1530 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1531 g_usBs3TestStep++;
1532 }
1533 }
1534
1535/** @todo CS.LIMIT / canonical(CS) */
1536
1537
1538 /*
1539 * Check invalid gate types.
1540 */
1541 g_usBs3TestStep = 32000;
1542 for (iRing = 0; iRing <= 3; iRing++)
1543 {
1544 static const uint16_t s_auCSes[] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32, BS3_SEL_R0_CS64,
1545 BS3_SEL_TSS16, BS3_SEL_TSS32, BS3_SEL_TSS64, 0, BS3_SEL_SPARE_1f };
1546 static uint16_t const s_auInvlTypes64[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13,
1547 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1548 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f };
1549 static uint16_t const s_auInvlTypes32[] = { 0, 1, 2, 3, 8, 9, 10, 11, 13,
1550 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1551 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
1552 /*286:*/ 12, 14, 15 };
1553 uint16_t const * const pauInvTypes = cIdteShift != 0 ? s_auInvlTypes64 : s_auInvlTypes32;
1554 uint16_t const cInvTypes = cIdteShift != 0 ? RT_ELEMENTS(s_auInvlTypes64)
1555 : f386Plus ? RT_ELEMENTS(s_auInvlTypes32) - 3 : RT_ELEMENTS(s_auInvlTypes32);
1556
1557
1558 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1559 {
1560 unsigned iType;
1561
1562 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1563 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1564# if TMPL_BITS == 32
1565 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1566# endif
1567 for (iType = 0; iType < cInvTypes; iType++)
1568 {
1569 uint8_t const bSavedType = paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type;
1570 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = pauInvTypes[iType] >> 4;
1571 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = pauInvTypes[iType] & 0xf;
1572
1573 for (i = 0; i < 4; i++)
1574 {
1575 for (j = 0; j < RT_ELEMENTS(s_auCSes); j++)
1576 {
1577 uint16_t uCs = (unsigned)(s_auCSes[j] - BS3_SEL_R0_FIRST) < (unsigned)(4 << BS3_SEL_RING_SHIFT)
1578 ? (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT)
1579 : s_auCSes[j] | i;
1580 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x type=%#x\n", g_usBs3TestStep, iCtx, iRing, i, uCs, pauInvTypes[iType]);*/
1581 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1582 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1583 g_usBs3TestStep++;
1584 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1585
1586 /* Mark it not-present to check that invalid type takes precedence. */
1587 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 0;
1588 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1589 g_usBs3TestStep++;
1590 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1591 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1592 }
1593 }
1594
1595 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1596 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = bSavedType;
1597 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = 0;
1598 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1599 }
1600 }
1601 }
1602 BS3_ASSERT(g_usBs3TestStep < 62000U && g_usBs3TestStep > 32000U);
1603
1604
1605 /** @todo
1606 * - Run \#PF and \#GP (and others?) at CPLs other than zero.
1607 * - Quickly generate all faults.
1608     * - All the v8086 peculiarities.
1609 */
1610
1611# if TMPL_BITS != 16
1612 Bs3MemFree(pbIdtCopyAlloc, 12*_1K);
1613# endif
1614}
1615#endif /* convert me */
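

/*
 * Illustrative sketch (not referenced by the tests): how the #GP error code
 * expected by the bs3CpuBasic2_CompareGpCtx() calls above is composed when the
 * offending descriptor lives in the IDT.  Bits 15:3 hold the selector index,
 * which for an IDT reference is simply the vector, and bit 1
 * (X86_TRAP_ERR_IDT) marks it as an IDT reference; bit 0 (EXT) stays zero here
 * since the tests raise the events with INT xx.  The helper name is made up
 * for this example and nothing in the tests calls it.
 */
DECLINLINE(uint16_t) bs3CpuBasic2_SketchIdtGpErrCd(uint8_t bVector)
{
    return (uint16_t)(((uint16_t)bVector << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
}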
1616
1617
1618static void bs3CpuBasic2_RaiseXcpt11Worker(uint8_t bMode, uint8_t *pbBuf, unsigned cbCacheLine, bool fAm, bool fPf,
1619 RTCCUINTXREG uFlatBufPtr, BS3CPUBASIC2PFTTSTCMNMODE const BS3_FAR *pCmn)
1620{
1621 BS3TRAPFRAME TrapCtx;
1622 BS3REGCTX Ctx;
1623 BS3REGCTX CtxUdExpected;
1624 uint8_t const cRings = bMode == BS3_MODE_RM ? 1 : 4;
1625 uint8_t iRing;
1626 uint16_t iTest;
1627
1628 /* make sure they're allocated */
1629 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
1630 Bs3MemZero(&Ctx, sizeof(Ctx));
1631 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
1632
1633 /*
1634 * Test all relevant rings.
1635 *
1636 * The memory operand is ds:xBX, so point it to pbBuf.
1637 * The test snippets mostly use xAX as operand, with the div
1638 * one also using xDX, so make sure they make some sense.
1639 */
1640 Bs3RegCtxSaveEx(&Ctx, bMode, 512);
1641
1642 Ctx.cr0.u32 &= ~(X86_CR0_MP | X86_CR0_EM | X86_CR0_TS); /* so fninit + fld works */
1643
1644 for (iRing = BS3_MODE_IS_V86(bMode) ? 3 : 0; iRing < cRings; iRing++)
1645 {
1646 uint32_t uEbx;
1647 uint8_t fAc;
1648
1649 if (!BS3_MODE_IS_RM_OR_V86(bMode))
1650 Bs3RegCtxConvertToRingX(&Ctx, iRing);
1651
1652 if (!fPf || BS3_MODE_IS_32BIT_CODE(bMode) || BS3_MODE_IS_64BIT_CODE(bMode))
1653 Bs3RegCtxSetGrpDsFromCurPtr(&Ctx, &Ctx.rbx, pbBuf);
1654 else
1655 {
1656 /* Bs3RegCtxSetGrpDsFromCurPtr barfs when trying to output a sel:off address for the aliased buffer. */
1657 Ctx.ds = BS3_FP_SEG(pbBuf);
1658 Ctx.rbx.u32 = BS3_FP_OFF(pbBuf);
1659 }
1660 uEbx = Ctx.rbx.u32;
1661
1662 Ctx.rax.u = (bMode & BS3_MODE_CODE_MASK) == BS3_MODE_CODE_64
1663 ? UINT64_C(0x80868028680386fe) : UINT32_C(0x65020686);
1664 Ctx.rdx.u = UINT32_C(0x00100100); /* careful with range due to div */
1665
1666 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
1667
1668 /*
1669 * AC flag loop.
1670 */
1671 for (fAc = 0; fAc < 2; fAc++)
1672 {
1673 if (fAc)
1674 Ctx.rflags.u32 |= X86_EFL_AC;
1675 else
1676 Ctx.rflags.u32 &= ~X86_EFL_AC;
1677
1678 /*
1679 * Loop over the test snippets.
1680 */
1681 for (iTest = 0; iTest < pCmn->cEntries; iTest++)
1682 {
1683 uint8_t const fOp = pCmn->paEntries[iTest].fOp;
1684 uint16_t const cbMem = pCmn->paEntries[iTest].cbMem;
1685 uint8_t const cbAlign = pCmn->paEntries[iTest].cbAlign;
1686 uint16_t const cbMax = cbCacheLine + cbMem;
1687 uint16_t offMem;
1688 uint8_t BS3_FAR *poffUd = (uint8_t BS3_FAR *)Bs3SelLnkPtrToCurPtr(pCmn->paEntries[iTest].pfn);
1689 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pCmn->paEntries[iTest].pfn);
1690 CtxUdExpected.rip = Ctx.rip;
1691 CtxUdExpected.rip.u = Ctx.rip.u + poffUd[-1];
1692 CtxUdExpected.cs = Ctx.cs;
1693 CtxUdExpected.rflags = Ctx.rflags;
1694 if (bMode == BS3_MODE_RM)
1695 CtxUdExpected.rflags.u32 &= ~X86_EFL_AC; /** @todo investigate. automatically cleared, or is it just our code? Observed with bs3-cpu-instr-3 too (10980xe), seems to be the CPU doing it. */
1696 CtxUdExpected.rdx = Ctx.rdx;
1697 CtxUdExpected.rax = Ctx.rax;
1698 if (fOp & MYOP_LD)
1699 {
1700 switch (cbMem)
1701 {
1702 case 2:
1703 CtxUdExpected.rax.u16 = 0x0101;
1704 break;
1705 case 4:
1706 CtxUdExpected.rax.u32 = UINT32_C(0x01010101);
1707 break;
1708 case 8:
1709 CtxUdExpected.rax.u64 = UINT64_C(0x0101010101010101);
1710 break;
1711 }
1712 }
1713
1714 /*
1715 * Buffer misalignment loop.
1716                 * Note! We must make sure to cross a cache line here in order
1717                 *       to cover the split-lock scenario. (The buffer is cache
1718                 *       line aligned.)
1719 */
1720 for (offMem = 0; offMem < cbMax; offMem++)
1721 {
1722 bool const fMisaligned = (offMem & (cbAlign - 1)) != 0;
1723 unsigned offBuf = cbMax + cbMem * 2;
1724 while (offBuf-- > 0)
1725 pbBuf[offBuf] = 1; /* byte-by-byte to make sure it doesn't trigger AC. */
1726
1727 CtxUdExpected.rbx.u32 = Ctx.rbx.u32 = uEbx + offMem; /* ASSUMES memory in first 4GB. */
1728 if (BS3_MODE_IS_16BIT_SYS(bMode))
1729 g_uBs3TrapEipHint = Ctx.rip.u32;
1730
1731 //Bs3TestPrintf("iRing=%d iTest=%d cs:rip=%04RX16:%08RX32 ds:rbx=%04RX16:%08RX32 ss:esp=%04RX16:%08RX32 bXcpt=%#x errcd=%#x fAm=%d fAc=%d ESP=%#RX32\n",
1732 // iRing, iTest, Ctx.cs, Ctx.rip.u32, Ctx.ds, Ctx.rbx.u32, Ctx.ss, Ctx.rsp.u32, TrapCtx.bXcpt, (unsigned)TrapCtx.uErrCd, fAm, fAc, ASMGetESP());
1733
1734 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1735
1736 if ( (pCmn->paEntries[iTest].fOp & MYOP_AC_GP)
1737 && fMisaligned
1738 && (!fAm || iRing != 3 || !fAc || (offMem & 3 /* 10980XE */) == 0) )
1739 {
1740 if (fAc && bMode == BS3_MODE_RM)
1741 TrapCtx.Ctx.rflags.u32 |= X86_EFL_AC;
1742 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
1743 }
1744 else if (fPf && iRing == 3 && (!fAm || !fAc || !fMisaligned)) /* #AC beats #PF */
1745 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx,
1746 X86_TRAP_PF_P | X86_TRAP_PF_US
1747 | (pCmn->paEntries[iTest].fOp & MYOP_ST ? X86_TRAP_PF_RW : 0),
1748 uFlatBufPtr + offMem + (cbMem > 64 ? cbMem - 1 /*FXSAVE*/ : 0),
1749 pCmn->paEntries[iTest].offFaultInstr);
1750 else if (!fAm || iRing != 3 || !fAc || !fMisaligned)
1751 {
1752 if (fOp & MYOP_EFL)
1753 {
1754 CtxUdExpected.rflags.u16 &= ~X86_EFL_STATUS_BITS;
1755 CtxUdExpected.rflags.u16 |= TrapCtx.Ctx.rflags.u16 & X86_EFL_STATUS_BITS;
1756 }
1757 if (fOp == MYOP_LD_DIV)
1758 {
1759 CtxUdExpected.rax = TrapCtx.Ctx.rax;
1760 CtxUdExpected.rdx = TrapCtx.Ctx.rdx;
1761 }
1762 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1763 }
1764 else
1765 bs3CpuBasic2_CompareAcCtx(&TrapCtx, &Ctx, pCmn->paEntries[iTest].offFaultInstr);
1766
1767 g_usBs3TestStep++;
1768 }
1769 }
1770 }
1771 }
1772}
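

/*
 * Illustrative sketch (not used by the worker above): the basic rule it applies
 * when deciding which exception a given access should raise.  Simplified - it
 * leaves out the MYOP_AC_GP (FXSAVE) special case and the CPU quirks handled
 * inline above - and the helper name and parameters are made up.
 */
DECLINLINE(uint8_t) bs3CpuBasic2_SketchExpectedXcpt(bool fAm, bool fAc, uint8_t bCpl, bool fMisaligned, bool fPf)
{
    /* #AC requires CR0.AM=1, EFLAGS.AC=1, CPL=3 and a misaligned access,
       and it beats the #PF from the supervisor-only alias page. */
    if (fAm && fAc && bCpl == 3 && fMisaligned)
        return X86_XCPT_AC;
    /* The aliased buffer is supervisor-only, so other ring-3 accesses #PF... */
    if (fPf && bCpl == 3)
        return X86_XCPT_PF;
    /* ...and everything else executes normally and stops at the trailing UD2. */
    return X86_XCPT_UD;
}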
1773
1774
1775/**
1776 * Entrypoint for \#AC tests.
1777 *
1778 * @returns 0 or BS3TESTDOMODE_SKIPPED.
1779 * @param bMode The CPU mode we're testing.
1780 *
1781 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
1782 * with control registers and such.
1783 */
1784BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_RaiseXcpt11)(uint8_t bMode)
1785{
1786 unsigned cbCacheLine = 128; /** @todo detect */
1787 uint8_t BS3_FAR *pbBufAlloc;
1788 uint8_t BS3_FAR *pbBuf;
1789 unsigned idxCmnModes;
1790 uint32_t fCr0;
1791
1792 /*
1793 * Skip if 386 or older.
1794 */
1795 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80486)
1796 {
1797 Bs3TestSkipped("#AC test requires 486 or later");
1798 return BS3TESTDOMODE_SKIPPED;
1799 }
1800
1801 bs3CpuBasic2_SetGlobals(bMode);
1802
1803    /* Get us a page aligned buffer. */
1804 pbBufAlloc = pbBuf = Bs3MemAllocZ(BS3_MODE_IS_RM_OR_V86(bMode) ? BS3MEMKIND_REAL : BS3MEMKIND_TILED, X86_PAGE_SIZE * 2);
1805 if (!pbBufAlloc)
1806 return Bs3TestFailed("Failed to allocate 2 pages of real-mode memory");
1807 if (BS3_FP_OFF(pbBuf) & (X86_PAGE_SIZE - 1))
1808 pbBuf = &pbBufAlloc[X86_PAGE_SIZE - (BS3_FP_OFF(pbBuf) & X86_PAGE_OFFSET_MASK)];
1809 BS3_ASSERT(pbBuf - pbBufAlloc <= X86_PAGE_SIZE);
1810 //Bs3TestPrintf("pbBuf=%p\n", pbBuf);
1811
1812 /* Find the g_aCmnModes entry. */
1813 idxCmnModes = 0;
1814 while (g_aCmnModes[idxCmnModes].bMode != (bMode & BS3_MODE_CODE_MASK))
1815 idxCmnModes++;
1816 //Bs3TestPrintf("idxCmnModes=%d bMode=%#x\n", idxCmnModes, bMode);
1817
1818 /* First round is w/o alignment checks enabled. */
1819 //Bs3TestPrintf("round 1\n");
1820 fCr0 = Bs3RegGetCr0();
1821 BS3_ASSERT(!(fCr0 & X86_CR0_AM));
1822 Bs3RegSetCr0(fCr0 & ~X86_CR0_AM);
1823#if 1
1824 bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBuf, cbCacheLine, false /*fAm*/, false /*fPf*/, 0, &g_aCmnModes[idxCmnModes]);
1825#endif
1826
1827    /* The second round is with alignment checks enabled. */
1828#if 1
1829 //Bs3TestPrintf("round 2\n");
1830 Bs3RegSetCr0(Bs3RegGetCr0() | X86_CR0_AM);
1831 bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBuf, cbCacheLine, true /*fAm*/, false /*fPf*/, 0, &g_aCmnModes[idxCmnModes]);
1832#endif
1833
1834#if 1
1835 /* The third and fourth round access the buffer via a page alias that's not
1836 accessible from ring-3. The third round has ACs disabled and the fourth
1837 has them enabled. */
1838 if (BS3_MODE_IS_PAGED(bMode) && !BS3_MODE_IS_V86(bMode))
1839 {
1840 /* Alias the buffer as system memory so ring-3 access with AC+AM will cause #PF: */
1841 /** @todo the aliasing is not necessary any more... */
1842 int rc;
1843 RTCCUINTXREG uFlatBufPtr = Bs3SelPtrToFlat(pbBuf);
1844 uint64_t const uAliasPgPtr = bMode & BS3_MODE_CODE_64 ? UINT64_C(0x0000648680000000) : UINT32_C(0x80000000);
1845 rc = Bs3PagingAlias(uAliasPgPtr, uFlatBufPtr & ~(uint64_t)X86_PAGE_OFFSET_MASK, X86_PAGE_SIZE * 2,
1846 X86_PTE_P | X86_PTE_RW);
1847 if (RT_SUCCESS(rc))
1848 {
1849 /* We 'misalign' the segment base here to make sure it's the final
1850 address that gets alignment checked and not just the operand value. */
1851 RTCCUINTXREG uAliasBufPtr = (RTCCUINTXREG)uAliasPgPtr + (uFlatBufPtr & X86_PAGE_OFFSET_MASK);
1852 uint8_t BS3_FAR *pbBufAlias = BS3_FP_MAKE(BS3_SEL_SPARE_00 | 3, (uFlatBufPtr & X86_PAGE_OFFSET_MASK) + 1);
1853 Bs3SelSetup16BitData(&Bs3GdteSpare00, uAliasPgPtr - 1);
1854
1855 //Bs3TestPrintf("round 3 pbBufAlias=%p\n", pbBufAlias);
1856 Bs3RegSetCr0(Bs3RegGetCr0() & ~X86_CR0_AM);
1857 bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBufAlias, cbCacheLine, false /*fAm*/,
1858 true /*fPf*/, uAliasBufPtr, &g_aCmnModes[idxCmnModes]);
1859
1860 //Bs3TestPrintf("round 4\n");
1861 Bs3RegSetCr0(Bs3RegGetCr0() | X86_CR0_AM);
1862 bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBufAlias, cbCacheLine, true /*fAm*/,
1863 true /*fPf*/, uAliasBufPtr, &g_aCmnModes[idxCmnModes]);
1864
1865 Bs3PagingUnalias(uAliasPgPtr, X86_PAGE_SIZE * 2);
1866 }
1867 else
1868 Bs3TestFailedF("Bs3PagingAlias failed with %Rrc", rc);
1869 }
1870#endif
1871
1872 Bs3MemFree(pbBufAlloc, X86_PAGE_SIZE * 2);
1873 Bs3RegSetCr0(fCr0);
1874 return 0;
1875}
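

/*
 * Illustrative sketch (not used above): why the ring-3 alias selector set up
 * in bs3CpuBasic2_RaiseXcpt11 gets a base of uAliasPgPtr - 1 while the operand
 * offset is bumped by +1.  The address that is actually accessed is the
 * segment base plus the operand offset, so the sum is unchanged, but the
 * operand offset alone is now misaligned - which is how the test proves that
 * it is the final address that gets alignment checked and not just the operand
 * value.  The helper name and parameters are made up.
 */
DECLINLINE(RTCCUINTXREG) bs3CpuBasic2_SketchFinalAddr(RTCCUINTXREG uSegBase, RTCCUINTXREG offOperand)
{
    /* (uAliasPgPtr - 1) + (offInPage + 1) == uAliasPgPtr + offInPage */
    return uSegBase + offOperand;
}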
1876
1877
1878/**
1879 * Executes one round of SIDT and SGDT tests using one assembly worker.
1880 *
1881 * This is written with driving everything from the 16-bit or 32-bit worker in
1882 * mind, i.e. not assuming the test bitcount is the same as the current.
1883 */
1884static void bs3CpuBasic2_sidt_sgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
1885 uint8_t const *pbExpected)
1886{
1887 BS3TRAPFRAME TrapCtx;
1888 BS3REGCTX Ctx;
1889 BS3REGCTX CtxUdExpected;
1890 BS3REGCTX TmpCtx;
1891 uint8_t const cbBuf = 8*2; /* test buffer area */
1892 uint8_t abBuf[8*2 + 8 + 8]; /* test buffer w/ misalignment test space and some extra guard. */
1893 uint8_t BS3_FAR *pbBuf = abBuf;
1894 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
1895 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
1896 uint8_t bFiller;
1897 int off;
1898 int off2;
1899 unsigned cb;
1900 uint8_t BS3_FAR *pbTest;
1901
1902 /* make sure they're allocated */
1903 Bs3MemZero(&Ctx, sizeof(Ctx));
1904 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
1905 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
1906 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
1907 Bs3MemZero(&abBuf, sizeof(abBuf));
1908
1909 /* Create a context, give this routine some more stack space, point the context
1910 at our SIDT [xBX] + UD2 combo, and point DS:xBX at abBuf. */
1911 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
1912 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
1913 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
1914 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
1915 g_uBs3TrapEipHint = Ctx.rip.u32;
1916 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
1917 Bs3RegCtxConvertToRingX(&Ctx, bRing);
1918
1919 /* For successful SIDT attempts, we'll stop at the UD2. */
1920 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
1921 CtxUdExpected.rip.u += pWorker->cbInstr;
1922
1923 /*
1924     * Check that it works at all and that only the bytes we expect get written to.
1925 */
1926 /* First with zero buffer. */
1927 Bs3MemZero(abBuf, sizeof(abBuf));
1928 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), 0))
1929 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1930 if (!ASMMemIsZero(abBuf, sizeof(abBuf)))
1931 Bs3TestFailedF("ASMMemIsZero or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1932 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1933 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1934 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1935 Bs3TestFailedF("286: Top base byte isn't 0xff (#1): %#x\n", abBuf[cbIdtr - 1]);
1936 if (!ASMMemIsZero(&abBuf[cbIdtr], cbBuf - cbIdtr))
1937 Bs3TestFailedF("Unexpected buffer bytes set (#1): cbIdtr=%u abBuf=%.*Rhxs\n", cbIdtr, cbBuf, pbBuf);
1938 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1939 Bs3TestFailedF("Mismatch (%s,#1): expected %.*Rhxs, got %.*Rhxs\n", pWorker->pszDesc, cbIdtr, pbExpected, cbIdtr, abBuf);
1940 g_usBs3TestStep++;
1941
1942    /* Again with a buffer filled with a byte not occurring in the previous result. */
1943 bFiller = 0x55;
1944 while (Bs3MemChr(abBuf, bFiller, cbBuf) != NULL)
1945 bFiller++;
1946 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1947 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
1948 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemSet is busted: bFiller=%#x abBuf=%.*Rhxs\n", bFiller, sizeof(abBuf), pbBuf);
1949
1950 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1951 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1952 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1953 Bs3TestFailedF("286: Top base byte isn't 0xff (#2): %#x\n", abBuf[cbIdtr - 1]);
1954 if (!ASMMemIsAllU8(&abBuf[cbIdtr], cbBuf - cbIdtr, bFiller))
1955 Bs3TestFailedF("Unexpected buffer bytes set (#2): cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1956 if (Bs3MemChr(abBuf, bFiller, cbIdtr) != NULL)
1957 Bs3TestFailedF("Not all bytes touched: cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1958 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1959 Bs3TestFailedF("Mismatch (%s,#2): expected %.*Rhxs, got %.*Rhxs\n", pWorker->pszDesc, cbIdtr, pbExpected, cbIdtr, abBuf);
1960 g_usBs3TestStep++;
1961
1962 /*
1963 * Slide the buffer along 8 bytes to cover misalignment.
1964 */
1965 for (off = 0; off < 8; off++)
1966 {
1967 pbBuf = &abBuf[off];
1968 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBuf[off]);
1969 CtxUdExpected.rbx.u = Ctx.rbx.u;
1970
1971 /* First with zero buffer. */
1972 Bs3MemZero(abBuf, sizeof(abBuf));
1973 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1974 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1975 if (off > 0 && !ASMMemIsZero(abBuf, off))
1976 Bs3TestFailedF("Unexpected buffer bytes set before (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1977 cbIdtr, off, off + cbBuf, abBuf);
1978 if (!ASMMemIsZero(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off))
1979 Bs3TestFailedF("Unexpected buffer bytes set after (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1980 cbIdtr, off, off + cbBuf, abBuf);
1981 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1982 Bs3TestFailedF("286: Top base byte isn't 0xff (#3): %#x\n", abBuf[off + cbIdtr - 1]);
1983 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1984 Bs3TestFailedF("Mismatch (#3): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1985 g_usBs3TestStep++;
1986
1987        /* Again with a buffer filled with a byte not occurring in the previous result. */
1988 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1989 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1990 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1991 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
1992 Bs3TestFailedF("Unexpected buffer bytes set before (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1993 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1994 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off, bFiller))
1995 Bs3TestFailedF("Unexpected buffer bytes set after (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1996 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1997 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
1998 Bs3TestFailedF("Not all bytes touched (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1999 cbIdtr, off, bFiller, off + cbBuf, abBuf);
2000 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2001 Bs3TestFailedF("286: Top base byte isn't 0xff (#4): %#x\n", abBuf[off + cbIdtr - 1]);
2002 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2003 Bs3TestFailedF("Mismatch (#4): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2004 g_usBs3TestStep++;
2005 }
2006 pbBuf = abBuf;
2007 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
2008 CtxUdExpected.rbx.u = Ctx.rbx.u;
2009
2010 /*
2011     * Play with the selector limit if the target mode supports limit checking.
2012     * We use BS3_SEL_TEST_PAGE_00 for this.
2013 */
2014 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2015 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2016 {
2017 uint16_t cbLimit;
2018 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBuf);
2019 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2020 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2021 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
2022 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
2023 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
2024
2025 if (pWorker->fSs)
2026 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2027 else
2028 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2029
2030 /* Expand up (normal). */
2031 for (off = 0; off < 8; off++)
2032 {
2033 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2034 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2035 {
2036 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2037 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
2038 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2039 if (off + cbIdtr <= cbLimit + 1)
2040 {
2041 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2042 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
2043 Bs3TestFailedF("Not all bytes touched (#5): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2044 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2045 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2046 Bs3TestFailedF("Mismatch (#5): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2047 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2048 Bs3TestFailedF("286: Top base byte isn't 0xff (#5): %#x\n", abBuf[off + cbIdtr - 1]);
2049 }
2050 else
2051 {
2052 if (pWorker->fSs)
2053 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2054 else
2055 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2056 if (off + 2 <= cbLimit + 1)
2057 {
2058 if (Bs3MemChr(&abBuf[off], bFiller, 2) != NULL)
2059 Bs3TestFailedF("Limit bytes not touched (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2060 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2061 if (Bs3MemCmp(&abBuf[off], pbExpected, 2) != 0)
2062 Bs3TestFailedF("Mismatch (#6): expected %.2Rhxs, got %.2Rhxs\n", pbExpected, &abBuf[off]);
2063 if (!ASMMemIsAllU8(&abBuf[off + 2], cbIdtr - 2, bFiller))
2064 Bs3TestFailedF("Base bytes touched on #GP (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2065 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2066 }
2067 else if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
2068 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2069 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2070 }
2071
2072 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2073 Bs3TestFailedF("Leading bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2074 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2075 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
2076 Bs3TestFailedF("Trailing bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2077 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2078
2079 g_usBs3TestStep++;
2080 }
2081 }
2082
2083        /* Expand down (weird). Inverted valid area compared to expand up,
2084           so a limit of zero gives us a valid range of 0001h..0ffffh (instead of
2085           a segment with one valid byte at 0000h). Whereas a limit of 0fffeh
2086           means one valid byte at 0ffffh, and a limit of 0ffffh means none
2087           (because in a normal expand-up segment 0ffffh means all 64KB are
2088           accessible). */
2089 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2090 for (off = 0; off < 8; off++)
2091 {
2092 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2093 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2094 {
2095 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2096 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
2097 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2098
2099 if (off > cbLimit)
2100 {
2101 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2102 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
2103 Bs3TestFailedF("Not all bytes touched (#8): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2104 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2105 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2106 Bs3TestFailedF("Mismatch (#8): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2107 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2108 Bs3TestFailedF("286: Top base byte isn't 0xff (#8): %#x\n", abBuf[off + cbIdtr - 1]);
2109 }
2110 else
2111 {
2112 if (pWorker->fSs)
2113 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2114 else
2115 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2116 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
2117 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2118 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2119 }
2120
2121 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2122 Bs3TestFailedF("Leading bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2123 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2124 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
2125 Bs3TestFailedF("Trailing bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2126 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2127
2128 g_usBs3TestStep++;
2129 }
2130 }
2131
2132 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
2133 CtxUdExpected.rbx.u = Ctx.rbx.u;
2134 CtxUdExpected.ss = Ctx.ss;
2135 CtxUdExpected.ds = Ctx.ds;
2136 }
2137
2138 /*
2139 * Play with the paging.
2140 */
2141 if ( BS3_MODE_IS_PAGED(bTestMode)
2142 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
2143 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
2144 {
2145 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
2146
2147 /*
2148 * Slide the buffer towards the trailing guard page. We'll observe the
2149 * first word being written entirely separately from the 2nd dword/qword.
2150 */
2151 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2152 {
2153 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2154 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2155 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2156 if (off + cbIdtr <= X86_PAGE_SIZE)
2157 {
2158 CtxUdExpected.rbx = Ctx.rbx;
2159 CtxUdExpected.ss = Ctx.ss;
2160 CtxUdExpected.ds = Ctx.ds;
2161 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2162 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2163 Bs3TestFailedF("Mismatch (#9): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2164 }
2165 else
2166 {
2167 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2168 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2169 if ( off <= X86_PAGE_SIZE - 2
2170 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2171 Bs3TestFailedF("Mismatch (#10): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2172 pbExpected, &pbTest[off], off);
2173 if ( off < X86_PAGE_SIZE - 2
2174 && !ASMMemIsAllU8(&pbTest[off + 2], X86_PAGE_SIZE - off - 2, bFiller))
2175 Bs3TestFailedF("Wrote partial base on #PF (#10): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2176 bFiller, X86_PAGE_SIZE - off - 2, &pbTest[off + 2], off);
2177 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2178 Bs3TestFailedF("Wrote partial limit on #PF (#10): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2179 }
2180 g_usBs3TestStep++;
2181 }
2182
2183 /*
2184 * Now, do it the other way around. It should look normal now since writing
2185 * the limit will #PF first and nothing should be written.
2186 */
2187 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
2188 {
2189 Bs3MemSet(pbTest, bFiller, 48);
2190 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2191 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2192 if (off >= 0)
2193 {
2194 CtxUdExpected.rbx = Ctx.rbx;
2195 CtxUdExpected.ss = Ctx.ss;
2196 CtxUdExpected.ds = Ctx.ds;
2197 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2198 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2199 Bs3TestFailedF("Mismatch (#11): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2200 }
2201 else
2202 {
2203 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2204 uFlatTest + off, 0 /*cbIpAdjust*/);
2205 if ( -off < cbIdtr
2206 && !ASMMemIsAllU8(pbTest, cbIdtr + off, bFiller))
2207 Bs3TestFailedF("Wrote partial content on #PF (#12): bFiller=%#x, found %.*Rhxs; off=%d\n",
2208 bFiller, cbIdtr + off, pbTest, off);
2209 }
2210 if (!ASMMemIsAllU8(&pbTest[RT_MAX(cbIdtr + off, 0)], 16, bFiller))
2211 Bs3TestFailedF("Wrote beyond expected area (#13): bFiller=%#x, found %.16Rhxs; off=%d\n",
2212 bFiller, &pbTest[RT_MAX(cbIdtr + off, 0)], off);
2213 g_usBs3TestStep++;
2214 }
2215
2216 /*
2217         * Combine paging and segment limit and check ordering.
2218         * This is kind of interesting here since the instruction seems to
2219         * be doing two separate writes.
2220 */
2221 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2222 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2223 {
2224 uint16_t cbLimit;
2225
2226 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2227 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2228 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2229 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2230 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2231
2232 if (pWorker->fSs)
2233 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2234 else
2235 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2236
2237 /* Expand up (normal), approaching tail guard page. */
2238 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2239 {
2240 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2241 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2242 {
2243 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2244 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2245 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2246 if (off + cbIdtr <= cbLimit + 1)
2247 {
2248 /* No #GP, but maybe #PF. */
2249 if (off + cbIdtr <= X86_PAGE_SIZE)
2250 {
2251 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2252 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2253 Bs3TestFailedF("Mismatch (#14): expected %.*Rhxs, got %.*Rhxs\n",
2254 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2255 }
2256 else
2257 {
2258 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2259 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2260 if ( off <= X86_PAGE_SIZE - 2
2261 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2262 Bs3TestFailedF("Mismatch (#15): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2263 pbExpected, &pbTest[off], off);
2264 cb = X86_PAGE_SIZE - off - 2;
2265 if ( off < X86_PAGE_SIZE - 2
2266 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2267 Bs3TestFailedF("Wrote partial base on #PF (#15): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2268 bFiller, cb, &pbTest[off + 2], off);
2269 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2270 Bs3TestFailedF("Wrote partial limit on #PF (#15): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2271 }
2272 }
2273 else if (off + 2 <= cbLimit + 1)
2274 {
2275                        /* Writing the [ig]dtr.limit part does not cause #GP since it is within the segment limit, but it may cause #PF; it is the base write that causes the #GP. */
2276 if (off <= X86_PAGE_SIZE - 2)
2277 {
2278 if (pWorker->fSs)
2279 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2280 else
2281 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2282 if (Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2283 Bs3TestFailedF("Mismatch (#16): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2284 pbExpected, &pbTest[off], off);
2285 cb = X86_PAGE_SIZE - off - 2;
2286 if ( off < X86_PAGE_SIZE - 2
2287 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2288 Bs3TestFailedF("Wrote partial base with limit (#16): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2289 bFiller, cb, &pbTest[off + 2], off);
2290 }
2291 else
2292 {
2293 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2294 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2295 if ( off < X86_PAGE_SIZE
2296 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2297 Bs3TestFailedF("Mismatch (#16): Partial limit write on #PF: bFiller=%#x, got %.*Rhxs\n",
2298 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2299 }
2300 }
2301 else
2302 {
2303 /* #GP/#SS on limit. */
2304 if (pWorker->fSs)
2305 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2306 else
2307 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2308 if ( off < X86_PAGE_SIZE
2309 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2310 Bs3TestFailedF("Mismatch (#17): Partial write on #GP: bFiller=%#x, got %.*Rhxs\n",
2311 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2312 }
2313
2314 cb = RT_MIN(cbIdtr * 2, off - (X86_PAGE_SIZE - cbIdtr*2));
2315 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], cb, bFiller))
2316 Bs3TestFailedF("Leading bytes touched (#18): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2317                                       cbIdtr, off, cbLimit, bFiller, cb, &pbTest[X86_PAGE_SIZE - cbIdtr * 2]);
2318
2319 g_usBs3TestStep++;
2320
2321 /* Set DS to 0 and check that we get #GP(0). */
2322 if (!pWorker->fSs)
2323 {
2324 Ctx.ds = 0;
2325 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2326 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2327 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2328 g_usBs3TestStep++;
2329 }
2330 }
2331 }
2332
2333 /* Expand down. */
2334 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
2335 uFlatTest -= X86_PAGE_SIZE;
2336
2337 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2338 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2339 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2340 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2341
2342 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2343 {
2344 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2345 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2346 {
2347 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2348 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller, cbIdtr * 2);
2349 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2350 if (cbLimit < off && off >= X86_PAGE_SIZE)
2351 {
2352 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2353 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2354 Bs3TestFailedF("Mismatch (#19): expected %.*Rhxs, got %.*Rhxs\n",
2355 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2356 cb = X86_PAGE_SIZE + cbIdtr*2 - off;
2357 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], cb, bFiller))
2358 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2359                                           cbIdtr, off, cbLimit, bFiller, cb, &pbTest[off + cbIdtr]);
2360 }
2361 else
2362 {
2363 if (cbLimit < off && off < X86_PAGE_SIZE)
2364 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2365 uFlatTest + off, 0 /*cbIpAdjust*/);
2366 else if (pWorker->fSs)
2367 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2368 else
2369 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2370 cb = cbIdtr*2;
2371 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE], cb, bFiller))
2372 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2373                                           cbIdtr, off, cbLimit, bFiller, cb, &pbTest[X86_PAGE_SIZE]);
2374 }
2375 g_usBs3TestStep++;
2376 }
2377 }
2378
2379 pbTest += X86_PAGE_SIZE;
2380 uFlatTest += X86_PAGE_SIZE;
2381 }
2382
2383 Bs3MemGuardedTestPageFree(pbTest);
2384 }
2385
2386 /*
2387 * Check non-canonical 64-bit space.
2388 */
2389 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
2390 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
2391 {
2392 /* Make our references relative to the gap. */
2393 pbTest += g_cbBs3PagingOneCanonicalTrap;
2394
2395 /* Hit it from below. */
2396 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2397 {
2398 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
2399 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2400 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2401 if (off + cbIdtr <= 0)
2402 {
2403 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2404 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2405 Bs3TestFailedF("Mismatch (#21): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2406 }
2407 else
2408 {
2409 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2410 if (off <= -2 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2411 Bs3TestFailedF("Mismatch (#21): expected limit %.2Rhxs, got %.2Rhxs\n", pbExpected, &pbTest[off]);
2412 off2 = off <= -2 ? 2 : 0;
2413 cb = cbIdtr - off2;
2414 if (!ASMMemIsAllU8(&pbTest[off + off2], cb, bFiller))
2415 Bs3TestFailedF("Mismatch (#21): touched base %.*Rhxs, got %.*Rhxs\n",
2416 cb, &pbExpected[off], cb, &pbTest[off + off2]);
2417 }
2418 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2419 Bs3TestFailedF("Leading bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
2420 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2421 Bs3TestFailedF("Trailing bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2422 }
2423
2424 /* Hit it from above. */
2425 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2426 {
2427 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
2428 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2429 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2430 if (off >= 0)
2431 {
2432 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2433 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2434 Bs3TestFailedF("Mismatch (#22): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2435 }
2436 else
2437 {
2438 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2439 if (!ASMMemIsAllU8(&pbTest[off], cbIdtr, bFiller))
2440 Bs3TestFailedF("Mismatch (#22): touched base %.*Rhxs, got %.*Rhxs\n",
2441 cbIdtr, &pbExpected[off], cbIdtr, &pbTest[off]);
2442 }
2443 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2444 Bs3TestFailedF("Leading bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
2445 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2446 Bs3TestFailedF("Trailing bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2447 }
2448
2449 }
2450}
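

/*
 * Illustrative sketch (not used by the function above): the validity rule for
 * a 16-bit expand-down data segment as exercised by the "Expand down (weird)"
 * blocks.  Valid offsets run from cbLimit+1 up to 0ffffh, the inverse of the
 * expand-up case.  The helper name and parameters are made up.
 */
DECLINLINE(bool) bs3CpuBasic2_SketchIsWithinExpandDown16(uint16_t offFirst, uint16_t cbAccess, uint16_t cbLimit)
{
    /* Every byte of the access must lie above the limit and below 10000h. */
    uint32_t const offLast = (uint32_t)offFirst + cbAccess - 1;
    return offFirst > cbLimit && offLast <= UINT32_C(0xffff);
}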
2451
2452
2453static void bs3CpuBasic2_sidt_sgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
2454 uint8_t const *pbExpected)
2455{
2456 unsigned idx;
2457 unsigned bRing;
2458 unsigned iStep = 0;
2459
2460 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
2461 test and don't want to bother with double faults. */
2462 for (bRing = 0; bRing <= 3; bRing++)
2463 {
2464 for (idx = 0; idx < cWorkers; idx++)
2465 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
2466 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ ))
2467 {
2468 g_usBs3TestStep = iStep;
2469 bs3CpuBasic2_sidt_sgdt_One(&paWorkers[idx], bTestMode, bRing, pbExpected);
2470 iStep += 1000;
2471 }
2472 if (BS3_MODE_IS_RM_OR_V86(bTestMode))
2473 break;
2474 }
2475}
2476
2477
2478BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_sidt)(uint8_t bMode)
2479{
2480 union
2481 {
2482 RTIDTR Idtr;
2483 uint8_t ab[16];
2484 } Expected;
2485
2486 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
2487 bs3CpuBasic2_SetGlobals(bMode);
2488
2489 /*
2490 * Pass to common worker which is only compiled once per mode.
2491 */
2492 Bs3MemZero(&Expected, sizeof(Expected));
2493 ASMGetIDTR(&Expected.Idtr);
2494 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSidtWorkers, RT_ELEMENTS(g_aSidtWorkers), Expected.ab);
2495
2496 /*
2497 * Re-initialize the IDT.
2498 */
2499 Bs3TrapReInit();
2500 return 0;
2501}
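

/*
 * Illustrative sketch (not used above): the memory image that SIDT/SGDT store
 * and that bs3CpuBasic2_sidt_sgdt_One compares against - a 16-bit limit
 * followed by the base, i.e. 2+4 bytes (2+8 in 64-bit code).  On the 80286 the
 * unimplemented top base byte is stored as 0ffh, which is what the f286 checks
 * above look for.  The helper name and parameters are made up.
 */
DECLINLINE(void) bs3CpuBasic2_SketchSxdtImage(uint8_t BS3_FAR *pbDst, uint16_t cbLimit, uint64_t uBase,
                                              uint8_t cbBase, bool f286)
{
    unsigned i;
    pbDst[0] = (uint8_t)cbLimit;
    pbDst[1] = (uint8_t)(cbLimit >> 8);
    for (i = 0; i < cbBase; i++)
        pbDst[2 + i] = (uint8_t)(uBase >> (i * 8));
    if (f286)
        pbDst[2 + cbBase - 1] = 0xff; /* 286: top base byte always reads as 0ffh. */
}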
2502
2503
2504BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_sgdt)(uint8_t bMode)
2505{
2506 uint64_t const uOrgAddr = Bs3Lgdt_Gdt.uAddr;
2507 uint64_t uNew = 0;
2508 union
2509 {
2510 RTGDTR Gdtr;
2511 uint8_t ab[16];
2512 } Expected;
2513
2514 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
2515 bs3CpuBasic2_SetGlobals(bMode);
2516
2517 /*
2518     * In paged mode, try to push the GDT way up.
2519 */
2520 Bs3MemZero(&Expected, sizeof(Expected));
2521 ASMGetGDTR(&Expected.Gdtr);
2522 if (BS3_MODE_IS_PAGED(bMode))
2523 {
2524/** @todo loading non-canonical base addresses. */
2525 int rc;
2526 uNew = BS3_MODE_IS_64BIT_SYS(bMode) ? UINT64_C(0xffff80fedcb70000) : UINT64_C(0xc2d28000);
2527 uNew |= uOrgAddr & X86_PAGE_OFFSET_MASK;
2528 rc = Bs3PagingAlias(uNew, uOrgAddr, Bs3Lgdt_Gdt.cb, X86_PTE_P | X86_PTE_RW | X86_PTE_US | X86_PTE_D | X86_PTE_A);
2529 if (RT_SUCCESS(rc))
2530 {
2531 Bs3Lgdt_Gdt.uAddr = uNew;
2532 Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uNew);
2533 ASMGetGDTR(&Expected.Gdtr);
2534 if (BS3_MODE_IS_64BIT_SYS(bMode) && ARCH_BITS != 64)
2535 *(uint32_t *)&Expected.ab[6] = (uint32_t)(uNew >> 32);
2536 }
2537 }
2538
2539 /*
2540 * Pass to common worker which is only compiled once per mode.
2541 */
2542 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSgdtWorkers, RT_ELEMENTS(g_aSgdtWorkers), Expected.ab);
2543
2544 /*
2545 * Unalias the GDT.
2546 */
2547 if (uNew != 0)
2548 {
2549 Bs3Lgdt_Gdt.uAddr = uOrgAddr;
2550 Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uOrgAddr);
2551 Bs3PagingUnalias(uNew, Bs3Lgdt_Gdt.cb);
2552 }
2553
2554 /*
2555 * Re-initialize the IDT.
2556 */
2557 Bs3TrapReInit();
2558 return 0;
2559}
2560
2561
2562
2563/*
2564 * LIDT & LGDT
2565 */
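

/*
 * Illustrative sketch (not used below): the canonical address rule behind the
 * fGP column of the 64-bit value table in bs3CpuBasic2_lidt_lgdt_One and the
 * canonical boundary probing in the SIDT/SGDT worker above.  In 64-bit mode a
 * base in the 0000800000000000h..ffff7fffffffffffh hole is non-canonical and
 * loading it is expected to raise #GP(0).  The helper name is made up.
 */
DECLINLINE(bool) bs3CpuBasic2_SketchIsCanonical(uint64_t uAddr)
{
    /* Bits 63:48 must be copies of bit 47, i.e. the address must be either
       below the lower boundary or at/above the upper one. */
    return uAddr < UINT64_C(0x0000800000000000) || uAddr >= UINT64_C(0xffff800000000000);
}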
2566
2567/**
2568 * Executes one round of LIDT and LGDT tests using one assembly worker.
2569 *
2570 * This is written with driving everything from the 16-bit or 32-bit worker in
2571 * mind, i.e. not assuming the test bitcount is the same as the current.
2572 */
2573static void bs3CpuBasic2_lidt_lgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
2574 uint8_t const *pbRestore, size_t cbRestore, uint8_t const *pbExpected)
2575{
2576 static const struct
2577 {
2578 bool fGP;
2579 uint16_t cbLimit;
2580 uint64_t u64Base;
2581 } s_aValues64[] =
2582 {
2583 { false, 0x0000, UINT64_C(0x0000000000000000) },
2584 { false, 0x0001, UINT64_C(0x0000000000000001) },
2585 { false, 0x0002, UINT64_C(0x0000000000000010) },
2586 { false, 0x0003, UINT64_C(0x0000000000000123) },
2587 { false, 0x0004, UINT64_C(0x0000000000001234) },
2588 { false, 0x0005, UINT64_C(0x0000000000012345) },
2589 { false, 0x0006, UINT64_C(0x0000000000123456) },
2590 { false, 0x0007, UINT64_C(0x0000000001234567) },
2591 { false, 0x0008, UINT64_C(0x0000000012345678) },
2592 { false, 0x0009, UINT64_C(0x0000000123456789) },
2593 { false, 0x000a, UINT64_C(0x000000123456789a) },
2594 { false, 0x000b, UINT64_C(0x00000123456789ab) },
2595 { false, 0x000c, UINT64_C(0x0000123456789abc) },
2596 { false, 0x001c, UINT64_C(0x00007ffffeefefef) },
2597 { false, 0xffff, UINT64_C(0x00007fffffffffff) },
2598 { true, 0xf3f1, UINT64_C(0x0000800000000000) },
2599 { true, 0x0000, UINT64_C(0x0000800000000000) },
2600 { true, 0x0000, UINT64_C(0x0000800000000333) },
2601 { true, 0x00f0, UINT64_C(0x0001000000000000) },
2602 { true, 0x0ff0, UINT64_C(0x0012000000000000) },
2603 { true, 0x0eff, UINT64_C(0x0123000000000000) },
2604 { true, 0xe0fe, UINT64_C(0x1234000000000000) },
2605 { true, 0x00ad, UINT64_C(0xffff300000000000) },
2606 { true, 0x0000, UINT64_C(0xffff7fffffffffff) },
2607 { true, 0x00f0, UINT64_C(0xffff7fffffffffff) },
2608 { false, 0x5678, UINT64_C(0xffff800000000000) },
2609 { false, 0x2969, UINT64_C(0xffffffffffeefefe) },
2610 { false, 0x1221, UINT64_C(0xffffffffffffffff) },
2611 { false, 0x1221, UINT64_C(0xffffffffffffffff) },
2612 };
2613 static const struct
2614 {
2615 uint16_t cbLimit;
2616 uint32_t u32Base;
2617 } s_aValues32[] =
2618 {
2619 { 0xdfdf, UINT32_C(0xefefefef) },
2620 { 0x0000, UINT32_C(0x00000000) },
2621 { 0x0001, UINT32_C(0x00000001) },
2622 { 0x0002, UINT32_C(0x00000012) },
2623 { 0x0003, UINT32_C(0x00000123) },
2624 { 0x0004, UINT32_C(0x00001234) },
2625 { 0x0005, UINT32_C(0x00012345) },
2626 { 0x0006, UINT32_C(0x00123456) },
2627 { 0x0007, UINT32_C(0x01234567) },
2628 { 0x0008, UINT32_C(0x12345678) },
2629 { 0x0009, UINT32_C(0x80204060) },
2630 { 0x000a, UINT32_C(0xddeeffaa) },
2631 { 0x000b, UINT32_C(0xfdecdbca) },
2632 { 0x000c, UINT32_C(0x6098456b) },
2633 { 0x000d, UINT32_C(0x98506099) },
2634 { 0x000e, UINT32_C(0x206950bc) },
2635 { 0x000f, UINT32_C(0x9740395d) },
2636 { 0x0334, UINT32_C(0x64a9455e) },
2637 { 0xb423, UINT32_C(0xd20b6eff) },
2638 { 0x4955, UINT32_C(0x85296d46) },
2639 { 0xffff, UINT32_C(0x07000039) },
2640 { 0xefe1, UINT32_C(0x0007fe00) },
2641 };
2642
2643 BS3TRAPFRAME TrapCtx;
2644 BS3REGCTX Ctx;
2645 BS3REGCTX CtxUdExpected;
2646 BS3REGCTX TmpCtx;
2647 uint8_t abBufLoad[40]; /* Test buffer w/ misalignment test space and some (cbIdtr) extra guard. */
2648 uint8_t abBufSave[32]; /* For saving the result after loading. */
2649 uint8_t abBufRestore[24]; /* For restoring sane value (same seg as abBufSave!). */
2650 uint8_t abExpectedFilled[32]; /* Same as pbExpected, except it's filled with bFiller2 instead of zeros. */
2651 uint8_t BS3_FAR *pbBufSave; /* Correctly aligned pointer into abBufSave. */
2652 uint8_t BS3_FAR *pbBufRestore; /* Correctly aligned pointer into abBufRestore. */
2653 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
2654 uint8_t const cbBaseLoaded = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 8
2655 : BS3_MODE_IS_16BIT_CODE(bTestMode) == !(pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE)
2656 ? 3 : 4;
2657 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
2658 uint8_t const bTop16BitBase = f286 ? 0xff : 0x00;
2659 uint8_t bFiller1; /* For filling abBufLoad. */
2660 uint8_t bFiller2; /* For filling abBufSave and expectations. */
2661 int off;
2662 uint8_t BS3_FAR *pbTest;
2663 unsigned i;
2664
2665 /* make sure they're allocated */
2666 Bs3MemZero(&Ctx, sizeof(Ctx));
2667 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
2668 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
2669 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
2670 Bs3MemZero(abBufSave, sizeof(abBufSave));
2671 Bs3MemZero(abBufLoad, sizeof(abBufLoad));
2672 Bs3MemZero(abBufRestore, sizeof(abBufRestore));
2673
2674 /*
2675 * Create a context, giving this routine some more stack space.
2676 * - Point the context at our LIDT [xBX] + SIDT [xDI] + LIDT [xSI] + UD2 combo.
2677 * - Point DS/SS:xBX at abBufLoad.
2678 * - Point ES:xDI at abBufSave.
2679 * - Point ES:xSI at abBufRestore.
2680 */
2681 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
2682 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
2683 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
2684 g_uBs3TrapEipHint = Ctx.rip.u32;
2685 Ctx.rflags.u16 &= ~X86_EFL_IF;
2686 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2687
2688 pbBufSave = abBufSave;
2689 if ((BS3_FP_OFF(pbBufSave) + 2) & 7)
2690 pbBufSave += 8 - ((BS3_FP_OFF(pbBufSave) + 2) & 7);
2691 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rdi, &Ctx.es, pbBufSave);
2692
2693 pbBufRestore = abBufRestore;
2694 if ((BS3_FP_OFF(pbBufRestore) + 2) & 7)
2695 pbBufRestore += 8 - ((BS3_FP_OFF(pbBufRestore) + 2) & 7);
2696 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsi, &Ctx.es, pbBufRestore);
2697 Bs3MemCpy(pbBufRestore, pbRestore, cbRestore);
2698
2699 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
2700 Bs3RegCtxConvertToRingX(&Ctx, bRing);
2701
2702    /* For successful LIDT/LGDT attempts, we'll stop at the UD2. */
2703 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
2704 CtxUdExpected.rip.u += pWorker->cbInstr;
2705
2706 /*
2707 * Check that it works at all.
2708 */
2709 Bs3MemZero(abBufLoad, sizeof(abBufLoad));
2710 Bs3MemCpy(abBufLoad, pbBufRestore, cbIdtr);
2711 Bs3MemZero(abBufSave, sizeof(abBufSave));
2712 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2713 if (bRing != 0)
2714 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2715 else
2716 {
2717 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2718 if (Bs3MemCmp(pbBufSave, pbExpected, cbIdtr * 2) != 0)
2719 Bs3TestFailedF("Mismatch (%s, #1): expected %.*Rhxs, got %.*Rhxs\n",
2720 pWorker->pszDesc, cbIdtr*2, pbExpected, cbIdtr*2, pbBufSave);
2721 }
2722 g_usBs3TestStep++;
2723
2724    /* Determine two filler bytes that don't appear in the previous result or our expectations. */
2725 bFiller1 = ~0x55;
2726 while ( Bs3MemChr(pbBufSave, bFiller1, cbIdtr) != NULL
2727 || Bs3MemChr(pbRestore, bFiller1, cbRestore) != NULL
2728 || bFiller1 == 0xff)
2729 bFiller1++;
2730 bFiller2 = 0x33;
2731 while ( Bs3MemChr(pbBufSave, bFiller2, cbIdtr) != NULL
2732 || Bs3MemChr(pbRestore, bFiller2, cbRestore) != NULL
2733 || bFiller2 == 0xff
2734 || bFiller2 == bFiller1)
2735 bFiller2++;
2736 Bs3MemSet(abExpectedFilled, bFiller2, sizeof(abExpectedFilled));
2737 Bs3MemCpy(abExpectedFilled, pbExpected, cbIdtr);
2738
2739    /* Again with a buffer filled with a byte not occurring in the previous result. */
2740 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2741 Bs3MemCpy(abBufLoad, pbBufRestore, cbIdtr);
2742 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2743 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2744 if (bRing != 0)
2745 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2746 else
2747 {
2748 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2749 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2750 Bs3TestFailedF("Mismatch (%s, #2): expected %.*Rhxs, got %.*Rhxs\n",
2751 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2752 }
2753 g_usBs3TestStep++;
2754
2755 /*
2756      * Try loading a bunch of different limit+base values to check what happens,
2757 * especially what happens wrt the top part of the base in 16-bit mode.
2758 */
2759 if (BS3_MODE_IS_64BIT_CODE(bTestMode))
2760 {
2761 for (i = 0; i < RT_ELEMENTS(s_aValues64); i++)
2762 {
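            /* 64-bit load image: 16-bit limit at offset 0, 64-bit base at offset 2 (10 bytes in total). */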
2763 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2764 Bs3MemCpy(&abBufLoad[0], &s_aValues64[i].cbLimit, 2);
2765 Bs3MemCpy(&abBufLoad[2], &s_aValues64[i].u64Base, 8);
2766 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2767 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2768 if (bRing != 0 || s_aValues64[i].fGP)
2769 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2770 else
2771 {
2772 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2773 if ( Bs3MemCmp(&pbBufSave[0], &s_aValues64[i].cbLimit, 2) != 0
2774 || Bs3MemCmp(&pbBufSave[2], &s_aValues64[i].u64Base, 8) != 0
2775 || !ASMMemIsAllU8(&pbBufSave[10], cbIdtr, bFiller2))
2776 Bs3TestFailedF("Mismatch (%s, #2): expected %04RX16:%016RX64, fillers %#x %#x, got %.*Rhxs\n",
2777 pWorker->pszDesc, s_aValues64[i].cbLimit, s_aValues64[i].u64Base,
2778 bFiller1, bFiller2, cbIdtr*2, pbBufSave);
2779 }
2780 g_usBs3TestStep++;
2781 }
2782 }
2783 else
2784 {
2785 for (i = 0; i < RT_ELEMENTS(s_aValues32); i++)
2786 {
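            /* 16/32-bit load image: 16-bit limit at offset 0, then cbBaseLoaded (3 or 4) base bytes at offset 2. */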
2787 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2788 Bs3MemCpy(&abBufLoad[0], &s_aValues32[i].cbLimit, 2);
2789 Bs3MemCpy(&abBufLoad[2], &s_aValues32[i].u32Base, cbBaseLoaded);
2790 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2791 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2792 if (bRing != 0)
2793 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2794 else
2795 {
2796 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2797 if ( Bs3MemCmp(&pbBufSave[0], &s_aValues32[i].cbLimit, 2) != 0
2798 || Bs3MemCmp(&pbBufSave[2], &s_aValues32[i].u32Base, cbBaseLoaded) != 0
2799 || ( cbBaseLoaded != 4
2800 && pbBufSave[2+3] != bTop16BitBase)
2801 || !ASMMemIsAllU8(&pbBufSave[8], cbIdtr, bFiller2))
2802 Bs3TestFailedF("Mismatch (%s,#3): loaded %04RX16:%08RX32, fillers %#x %#x%s, got %.*Rhxs\n",
2803 pWorker->pszDesc, s_aValues32[i].cbLimit, s_aValues32[i].u32Base, bFiller1, bFiller2,
2804 f286 ? ", 286" : "", cbIdtr*2, pbBufSave);
2805 }
2806 g_usBs3TestStep++;
2807 }
2808 }
2809
2810 /*
2811 * Slide the buffer along 8 bytes to cover misalignment.
2812 */
2813 for (off = 0; off < 8; off++)
2814 {
2815 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBufLoad[off]);
2816 CtxUdExpected.rbx.u = Ctx.rbx.u;
2817
2818 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2819 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2820 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2821 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2822 if (bRing != 0)
2823 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2824 else
2825 {
2826 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2827 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2828 Bs3TestFailedF("Mismatch (%s, #4): expected %.*Rhxs, got %.*Rhxs\n",
2829 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2830 }
2831 g_usBs3TestStep++;
2832 }
2833 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2834 CtxUdExpected.rbx.u = Ctx.rbx.u;
2835
2836 /*
2837      * Play with the selector limit if the target mode supports limit checking.
2838      * We use BS3_SEL_TEST_PAGE_00 for this.
2839 */
2840 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2841 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2842 {
2843 uint16_t cbLimit;
2844 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBufLoad);
2845 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2846 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2847 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
2848 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
2849 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
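        /* BS3_SEL_TEST_PAGE_00 now maps abBufLoad flat with DPL = bRing, so varying
           its limit below lets us provoke #GP/#SS on the descriptor load. */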
2850
2851 if (pWorker->fSs)
2852 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2853 else
2854 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2855
2856 /* Expand up (normal). */
2857 for (off = 0; off < 8; off++)
2858 {
2859 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2860 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2861 {
2862 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2863
2864 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2865 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2866 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2867 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2868 if (bRing != 0)
2869 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2870 else if (off + cbIdtr <= cbLimit + 1)
2871 {
2872 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2873 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2874 Bs3TestFailedF("Mismatch (%s, #5): expected %.*Rhxs, got %.*Rhxs\n",
2875 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2876 }
2877 else if (pWorker->fSs)
2878 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2879 else
2880 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2881 g_usBs3TestStep++;
2882
2883                 /* Again with zero limit and messed up base (should trigger triple fault if partially loaded). */
2884 abBufLoad[off] = abBufLoad[off + 1] = 0;
2885 abBufLoad[off + 2] |= 1;
2886 abBufLoad[off + cbIdtr - 2] ^= 0x5a;
2887 abBufLoad[off + cbIdtr - 1] ^= 0xa5;
2888 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2889 if (bRing != 0)
2890 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2891 else if (off + cbIdtr <= cbLimit + 1)
2892 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2893 else if (pWorker->fSs)
2894 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2895 else
2896 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2897 }
2898 }
2899
2900         /* Expand down (weird). The valid area is inverted compared to expand up,
2901            so a limit of zero gives us a valid range of 0001h..0ffffh (instead of
2902            a segment with one valid byte at 0000h). A limit of 0fffeh means one
2903            valid byte at 0ffffh, and a limit of 0ffffh means none (whereas for a
2904            normal expand-up segment a limit of 0ffffh means all 64KB are
2905            accessible). */
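        /* E.g. with cbLimit = 3 the valid offsets are 0004h..0ffffh, so in ring-0 a
           load at off = 4 succeeds while off = 3 must still raise #GP/#SS. */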
2906 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2907 for (off = 0; off < 8; off++)
2908 {
2909 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2910 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2911 {
2912 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2913
2914 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2915 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2916 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2917 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2918 if (bRing != 0)
2919 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2920 else if (off > cbLimit)
2921 {
2922 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2923 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2924 Bs3TestFailedF("Mismatch (%s, #6): expected %.*Rhxs, got %.*Rhxs\n",
2925 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2926 }
2927 else if (pWorker->fSs)
2928 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2929 else
2930 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2931 g_usBs3TestStep++;
2932
2933 /* Again with zero limit and messed up base (should trigger triple fault if partially loaded). */
2934 abBufLoad[off] = abBufLoad[off + 1] = 0;
2935 abBufLoad[off + 2] |= 3;
2936 abBufLoad[off + cbIdtr - 2] ^= 0x55;
2937 abBufLoad[off + cbIdtr - 1] ^= 0xaa;
2938 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2939 if (bRing != 0)
2940 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2941 else if (off > cbLimit)
2942 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2943 else if (pWorker->fSs)
2944 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2945 else
2946 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2947 }
2948 }
2949
2950 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2951 CtxUdExpected.rbx.u = Ctx.rbx.u;
2952 CtxUdExpected.ss = Ctx.ss;
2953 CtxUdExpected.ds = Ctx.ds;
2954 }
2955
2956 /*
2957 * Play with the paging.
2958 */
2959 if ( BS3_MODE_IS_PAGED(bTestMode)
2960 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
2961 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
2962 {
2963 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
2964
2965 /*
2966 * Slide the load buffer towards the trailing guard page.
2967 */
2968 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[X86_PAGE_SIZE]);
2969 CtxUdExpected.ss = Ctx.ss;
2970 CtxUdExpected.ds = Ctx.ds;
2971 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2972 {
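            /* Loads that fit below the page boundary succeed; anything spilling into
               the trailing guard page must #PF on the guard page. */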
2973 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller1, cbIdtr*2);
2974 if (off < X86_PAGE_SIZE)
2975 Bs3MemCpy(&pbTest[off], pbBufRestore, RT_MIN(X86_PAGE_SIZE - off, cbIdtr));
2976 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2977 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2978 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2979 if (bRing != 0)
2980 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2981 else if (off + cbIdtr <= X86_PAGE_SIZE)
2982 {
2983 CtxUdExpected.rbx = Ctx.rbx;
2984 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2985 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr*2) != 0)
2986 Bs3TestFailedF("Mismatch (%s, #7): expected %.*Rhxs, got %.*Rhxs\n",
2987 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2988 }
2989 else
2990 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2991 g_usBs3TestStep++;
2992
2993 /* Again with zero limit and maybe messed up base as well (triple fault if buggy).
2994 The 386DX-40 here triple faults (or something) with off == 0xffe, nothing else. */
2995 if ( off < X86_PAGE_SIZE && off + cbIdtr > X86_PAGE_SIZE
2996 && ( off != X86_PAGE_SIZE - 2
2997 || (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) != BS3CPU_80386)
2998 )
2999 {
3000 pbTest[off] = 0;
3001 if (off + 1 < X86_PAGE_SIZE)
3002 pbTest[off + 1] = 0;
3003 if (off + 2 < X86_PAGE_SIZE)
3004 pbTest[off + 2] |= 7;
3005 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3006 if (bRing != 0)
3007 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3008 else
3009 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3010 g_usBs3TestStep++;
3011 }
3012 }
3013
3014 /*
3015          * Now, do it the other way around. It should look normal now since reading
3016          * the limit will #PF first and nothing should be loaded.
3017 */
3018 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
3019 {
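            /* With a negative offset the limit word sits in the leading guard page,
               so we expect a #PF at uFlatTest + off before anything is loaded. */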
3020 Bs3MemSet(pbTest, bFiller1, 48);
3021 if (off >= 0)
3022 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3023 else if (off + cbIdtr > 0)
3024 Bs3MemCpy(pbTest, &pbBufRestore[-off], cbIdtr + off);
3025 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
3026 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3027 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3028 if (bRing != 0)
3029 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3030 else if (off >= 0)
3031 {
3032 CtxUdExpected.rbx = Ctx.rbx;
3033 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3034 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr*2) != 0)
3035 Bs3TestFailedF("Mismatch (%s, #8): expected %.*Rhxs, got %.*Rhxs\n",
3036 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3037 }
3038 else
3039 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3040 g_usBs3TestStep++;
3041
3042 /* Again with messed up base as well (triple fault if buggy). */
3043 if (off < 0 && off > -cbIdtr)
3044 {
3045 if (off + 2 >= 0)
3046 pbTest[off + 2] |= 15;
3047 pbTest[off + cbIdtr - 1] ^= 0xaa;
3048 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3049 if (bRing != 0)
3050 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3051 else
3052 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3053 g_usBs3TestStep++;
3054 }
3055 }
3056
3057 /*
3058 * Combine paging and segment limit and check ordering.
3059          * This is kind of interesting here since the instruction seems to
3060          * actually be doing two separate reads, just like its S[IG]DT counterpart.
3061 *
3062 * Note! My 486DX4 does a DWORD limit read when the operand size is 32-bit,
3063 * that's what f486Weirdness deals with.
3064 */
3065 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
3066 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
3067 {
3068 bool const f486Weirdness = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80486
3069 && BS3_MODE_IS_32BIT_CODE(bTestMode) == !(pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE);
3070 uint16_t cbLimit;
3071
3072 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
3073 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
3074 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
3075 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
3076 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
3077
3078 if (pWorker->fSs)
3079 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
3080 else
3081 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
3082
3083 /* Expand up (normal), approaching tail guard page. */
3084 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
3085 {
3086 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
3087 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
3088 {
3089 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
3090 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller1, cbIdtr * 2);
3091 if (off < X86_PAGE_SIZE)
3092 Bs3MemCpy(&pbTest[off], pbBufRestore, RT_MIN(cbIdtr, X86_PAGE_SIZE - off));
3093 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3094 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3095 if (bRing != 0)
3096 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3097 else if (off + cbIdtr <= cbLimit + 1)
3098 {
3099 /* No #GP, but maybe #PF. */
3100 if (off + cbIdtr <= X86_PAGE_SIZE)
3101 {
3102 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3103 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3104 Bs3TestFailedF("Mismatch (%s, #9): expected %.*Rhxs, got %.*Rhxs\n",
3105 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3106 }
3107 else
3108 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3109 }
3110 /* No #GP/#SS on limit, but instead #PF? */
3111 else if ( !f486Weirdness
3112 ? off < cbLimit && off >= 0xfff
3113 : off + 2 < cbLimit && off >= 0xffd)
3114 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3115 /* #GP/#SS on limit or base. */
3116 else if (pWorker->fSs)
3117 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
3118 else
3119 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3120
3121 g_usBs3TestStep++;
3122
3123 /* Set DS to 0 and check that we get #GP(0). */
3124 if (!pWorker->fSs)
3125 {
3126 Ctx.ds = 0;
3127 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3128 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3129 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
3130 g_usBs3TestStep++;
3131 }
3132 }
3133 }
3134
3135 /* Expand down. */
3136 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
3137 uFlatTest -= X86_PAGE_SIZE;
3138
3139 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
3140 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
3141 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
3142 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
3143
3144 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
3145 {
3146 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
3147 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
3148 {
3149 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
3150 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller1, cbIdtr * 2);
3151 if (off >= X86_PAGE_SIZE)
3152 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3153 else if (off > X86_PAGE_SIZE - cbIdtr)
3154 Bs3MemCpy(&pbTest[X86_PAGE_SIZE], &pbBufRestore[X86_PAGE_SIZE - off], cbIdtr - (X86_PAGE_SIZE - off));
3155 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3156 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3157 if (bRing != 0)
3158 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3159 else if (cbLimit < off && off >= X86_PAGE_SIZE)
3160 {
3161 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3162 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3163 Bs3TestFailedF("Mismatch (%s, #10): expected %.*Rhxs, got %.*Rhxs\n",
3164 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3165 }
3166 else if (cbLimit < off && off < X86_PAGE_SIZE)
3167 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3168 else if (pWorker->fSs)
3169 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
3170 else
3171 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3172 g_usBs3TestStep++;
3173 }
3174 }
3175
3176 pbTest += X86_PAGE_SIZE;
3177 uFlatTest += X86_PAGE_SIZE;
3178 }
3179
3180 Bs3MemGuardedTestPageFree(pbTest);
3181 }
3182
3183 /*
3184 * Check non-canonical 64-bit space.
3185 */
3186 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
3187 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
3188 {
3189 /* Make our references relative to the gap. */
3190 pbTest += g_cbBs3PagingOneCanonicalTrap;
3191
3192 /* Hit it from below. */
3193 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
3194 {
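            /* xBX = 0x0000800000000000 + off; if any of the cbIdtr bytes reach the
               non-canonical boundary (off + cbIdtr > 0) we expect #GP(0). */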
3195 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
3196 Bs3MemSet(&pbTest[-64], bFiller1, 64*2);
3197 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3198 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3199 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3200 if (off + cbIdtr > 0 || bRing != 0)
3201 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3202 else
3203 {
3204 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3205 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3206 Bs3TestFailedF("Mismatch (%s, #11): expected %.*Rhxs, got %.*Rhxs\n",
3207 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3208 }
3209 }
3210
3211 /* Hit it from above. */
3212 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
3213 {
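            /* xBX = 0xffff800000000000 + off; addresses below this boundary (off < 0)
               are non-canonical and must #GP(0). */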
3214 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
3215 Bs3MemSet(&pbTest[-64], bFiller1, 64*2);
3216 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3217 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3218 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3219 if (off < 0 || bRing != 0)
3220 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3221 else
3222 {
3223 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3224 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3225 Bs3TestFailedF("Mismatch (%s, #19): expected %.*Rhxs, got %.*Rhxs\n",
3226 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3227 }
3228 }
3229
3230 }
3231}
3232
3233
3234static void bs3CpuBasic2_lidt_lgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
3235 void const *pvRestore, size_t cbRestore, uint8_t const *pbExpected)
3236{
3237 unsigned idx;
3238 unsigned bRing;
3239 unsigned iStep = 0;
3240
3241 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
3242 test and don't want to bother with double faults. */
3243 for (bRing = BS3_MODE_IS_V86(bTestMode) ? 3 : 0; bRing <= 3; bRing++)
3244 {
3245 for (idx = 0; idx < cWorkers; idx++)
3246 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
3247 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ )
3248 && ( !(paWorkers[idx].fFlags & BS3CB2SIDTSGDT_F_386PLUS)
3249 || ( bTestMode > BS3_MODE_PE16
3250 || ( bTestMode == BS3_MODE_PE16
3251 && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)) ) )
3252 {
3253 //Bs3TestPrintf("idx=%-2d fpfnWorker=%p fSs=%d cbInstr=%d\n",
3254 // idx, paWorkers[idx].fpfnWorker, paWorkers[idx].fSs, paWorkers[idx].cbInstr);
3255 g_usBs3TestStep = iStep;
3256 bs3CpuBasic2_lidt_lgdt_One(&paWorkers[idx], bTestMode, bRing, pvRestore, cbRestore, pbExpected);
3257 iStep += 1000;
3258 }
3259 if (BS3_MODE_IS_RM_SYS(bTestMode))
3260 break;
3261 }
3262}
3263
3264
3265BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_lidt)(uint8_t bMode)
3266{
3267 union
3268 {
3269 RTIDTR Idtr;
3270 uint8_t ab[32]; /* At least cbIdtr*2! */
3271 } Expected;
3272
3273 //if (bMode != BS3_MODE_LM64) return 0;
3274 bs3CpuBasic2_SetGlobals(bMode);
3275
3276 /*
3277 * Pass to common worker which is only compiled once per mode.
3278 */
3279 Bs3MemZero(&Expected, sizeof(Expected));
3280 ASMGetIDTR(&Expected.Idtr);
3281
3282 if (BS3_MODE_IS_RM_SYS(bMode))
3283 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3284 &Bs3Lidt_Ivt, sizeof(Bs3Lidt_Ivt), Expected.ab);
3285 else if (BS3_MODE_IS_16BIT_SYS(bMode))
3286 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3287 &Bs3Lidt_Idt16, sizeof(Bs3Lidt_Idt16), Expected.ab);
3288 else if (BS3_MODE_IS_32BIT_SYS(bMode))
3289 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3290 &Bs3Lidt_Idt32, sizeof(Bs3Lidt_Idt32), Expected.ab);
3291 else
3292 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3293 &Bs3Lidt_Idt64, sizeof(Bs3Lidt_Idt64), Expected.ab);
3294
3295 /*
3296 * Re-initialize the IDT.
3297 */
3298 Bs3TrapReInit();
3299 return 0;
3300}
3301
3302
3303BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_lgdt)(uint8_t bMode)
3304{
3305 union
3306 {
3307 RTGDTR Gdtr;
3308 uint8_t ab[32]; /* At least cbIdtr*2! */
3309 } Expected;
3310
3311 //if (!BS3_MODE_IS_64BIT_SYS(bMode)) return 0;
3312 bs3CpuBasic2_SetGlobals(bMode);
3313
3314 /*
3315 * Pass to common worker which is only compiled once per mode.
3316 */
3317 if (BS3_MODE_IS_RM_SYS(bMode))
3318 ASMSetGDTR((PRTGDTR)&Bs3LgdtDef_Gdt);
3319 Bs3MemZero(&Expected, sizeof(Expected));
3320 ASMGetGDTR(&Expected.Gdtr);
3321
3322 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLgdtWorkers, RT_ELEMENTS(g_aLgdtWorkers),
3323 &Bs3LgdtDef_Gdt, sizeof(Bs3LgdtDef_Gdt), Expected.ab);
3324
3325 /*
3326 * Re-initialize the IDT.
3327 */
3328 Bs3TrapReInit();
3329 return 0;
3330}
3331
3332typedef union IRETBUF
3333{
3334 uint64_t au64[6]; /* max req is 5 */
3335 uint32_t au32[12]; /* max req is 9 */
3336 uint16_t au16[24]; /* max req is 5 */
3337 uint8_t ab[48];
3338} IRETBUF;
3339typedef IRETBUF BS3_FAR *PIRETBUF;
3340
3341
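/**
 * Builds an IRET frame in pIretBuf using cbPop byte sized entries:
 * 2 for 16-bit IRET, 4 for 32-bit IRETD and 8 for 64-bit o64 IRET.
 */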
3342static void iretbuf_SetupFrame(PIRETBUF pIretBuf, unsigned const cbPop,
3343 uint16_t uCS, uint64_t uPC, uint32_t fEfl, uint16_t uSS, uint64_t uSP)
3344{
3345 if (cbPop == 2)
3346 {
3347 pIretBuf->au16[0] = (uint16_t)uPC;
3348 pIretBuf->au16[1] = uCS;
3349 pIretBuf->au16[2] = (uint16_t)fEfl;
3350 pIretBuf->au16[3] = (uint16_t)uSP;
3351 pIretBuf->au16[4] = uSS;
3352 }
3353 else if (cbPop != 8)
3354 {
3355 pIretBuf->au32[0] = (uint32_t)uPC;
3356 pIretBuf->au16[1*2] = uCS;
3357 pIretBuf->au32[2] = (uint32_t)fEfl;
3358 pIretBuf->au32[3] = (uint32_t)uSP;
3359 pIretBuf->au16[4*2] = uSS;
3360 }
3361 else
3362 {
3363 pIretBuf->au64[0] = uPC;
3364 pIretBuf->au16[1*4] = uCS;
3365 pIretBuf->au64[2] = fEfl;
3366 pIretBuf->au64[3] = uSP;
3367 pIretBuf->au16[4*4] = uSS;
3368 }
3369}
3370
3371
3372static void bs3CpuBasic2_iret_Worker(uint8_t bTestMode, FPFNBS3FAR pfnIret, unsigned const cbPop,
3373 PIRETBUF pIretBuf, const char BS3_FAR *pszDesc)
3374{
3375 BS3TRAPFRAME TrapCtx;
3376 BS3REGCTX Ctx;
3377 BS3REGCTX CtxUdExpected;
3378 BS3REGCTX TmpCtx;
3379 BS3REGCTX TmpCtxExpected;
3380 uint8_t abLowUd[8];
3381 uint8_t abLowIret[8];
3382 FPFNBS3FAR pfnUdLow = (FPFNBS3FAR)abLowUd;
3383 FPFNBS3FAR pfnIretLow = (FPFNBS3FAR)abLowIret;
3384 unsigned const cbSameCplFrame = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 5*cbPop : 3*cbPop;
3385 bool const fUseLowCode = cbPop == 2 && !BS3_MODE_IS_16BIT_CODE(bTestMode);
3386 int iRingDst;
3387 int iRingSrc;
3388 uint16_t uDplSs;
3389 uint16_t uRplCs;
3390 uint16_t uRplSs;
3391// int i;
3392 uint8_t BS3_FAR *pbTest;
3393
3394 NOREF(abLowUd);
3395 #define IRETBUF_SET_SEL(a_idx, a_uValue) \
3396     do { *(uint16_t BS3_FAR *)&pIretBuf->ab[a_idx * cbPop] = (a_uValue); } while (0)
3397 #define IRETBUF_SET_REG(a_idx, a_uValue) \
3398     do { uint8_t BS3_FAR *pbTmp = &pIretBuf->ab[a_idx * cbPop]; \
3399          if (cbPop == 2) *(uint16_t BS3_FAR *)pbTmp = (uint16_t)(a_uValue); \
3400          else if (cbPop != 8) *(uint32_t BS3_FAR *)pbTmp = (uint32_t)(a_uValue); \
3401          else *(uint64_t BS3_FAR *)pbTmp = (a_uValue); \
3402     } while (0)
3403
3404 /* make sure they're allocated */
3405 Bs3MemZero(&Ctx, sizeof(Ctx));
3406 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
3407 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
3408 Bs3MemZero(&TmpCtxExpected, sizeof(TmpCtxExpected));
3409 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
3410
3411 /*
3412 * When dealing with 16-bit irets in 32-bit or 64-bit mode, we must have
3413 * copies of both iret and ud in the first 64KB of memory. The stack is
3414 * below 64KB, so we'll just copy the instructions onto the stack.
3415 */
3416 Bs3MemCpy(abLowUd, bs3CpuBasic2_ud2, 4);
3417 Bs3MemCpy(abLowIret, pfnIret, 4);
3418
3419 /*
3420 * Create a context (stack is irrelevant, we'll mainly be using pIretBuf).
3421 * - Point the context at our iret instruction.
3422 * - Point SS:xSP at pIretBuf.
3423 */
3424 Bs3RegCtxSaveEx(&Ctx, bTestMode, 0);
3425 if (!fUseLowCode)
3426 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pfnIret);
3427 else
3428 Bs3RegCtxSetRipCsFromCurPtr(&Ctx, pfnIretLow);
3429 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
3430 g_uBs3TrapEipHint = Ctx.rip.u32;
3431 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, pIretBuf);
3432
3433 /*
3434 * The first success (UD) context keeps the same code bit-count as the iret.
3435 */
3436 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
3437 if (!fUseLowCode)
3438 Bs3RegCtxSetRipCsFromLnkPtr(&CtxUdExpected, bs3CpuBasic2_ud2);
3439 else
3440 Bs3RegCtxSetRipCsFromCurPtr(&CtxUdExpected, pfnUdLow);
3441 CtxUdExpected.rsp.u += cbSameCplFrame;
3442
3443 /*
3444 * Check that it works at all.
3445 */
3446 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3447 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3448
3449 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3450 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3451 g_usBs3TestStep++;
3452
3453 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
3454 {
3455 /* Selectors are modified when switching rings, so we need to know
3456 what we're dealing with there. */
3457 if ( !BS3_SEL_IS_IN_R0_RANGE(Ctx.cs) || !BS3_SEL_IS_IN_R0_RANGE(Ctx.ss)
3458 || !BS3_SEL_IS_IN_R0_RANGE(Ctx.ds) || !BS3_SEL_IS_IN_R0_RANGE(Ctx.es))
3459 Bs3TestFailedF("Expected R0 CS, SS, DS and ES; not %#x, %#x, %#x and %#x\n", Ctx.cs, Ctx.ss, Ctx.ds, Ctx.es);
3460 if (Ctx.fs || Ctx.gs)
3461 Bs3TestFailed("Expected R0 FS and GS to be 0!\n");
3462
3463 /*
3464 * Test returning to outer rings if protected mode.
3465 */
3466 Bs3MemCpy(&TmpCtx, &Ctx, sizeof(TmpCtx));
3467 Bs3MemCpy(&TmpCtxExpected, &CtxUdExpected, sizeof(TmpCtxExpected));
3468 for (iRingDst = 3; iRingDst >= 0; iRingDst--)
3469 {
3470 Bs3RegCtxConvertToRingX(&TmpCtxExpected, iRingDst);
3471 TmpCtxExpected.ds = iRingDst ? 0 : TmpCtx.ds;
3472 TmpCtx.es = TmpCtxExpected.es;
3473 iretbuf_SetupFrame(pIretBuf, cbPop, TmpCtxExpected.cs, TmpCtxExpected.rip.u,
3474 TmpCtxExpected.rflags.u32, TmpCtxExpected.ss, TmpCtxExpected.rsp.u);
3475 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3476 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3477 g_usBs3TestStep++;
3478 }
3479
3480 /*
3481 * Check CS.RPL and SS.RPL.
3482 */
3483 for (iRingDst = 3; iRingDst >= 0; iRingDst--)
3484 {
3485 uint16_t const uDstSsR0 = (CtxUdExpected.ss & BS3_SEL_RING_SUB_MASK) + BS3_SEL_R0_FIRST;
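            /* uDstSsR0 is the expected SS rebased to its ring-0 selector; further down
               we add (uDplSs << BS3_SEL_RING_SHIFT) | uRplSs to it to get SS selectors
               with the wanted DPL and RPL combinations. */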
3486 Bs3MemCpy(&TmpCtxExpected, &CtxUdExpected, sizeof(TmpCtxExpected));
3487 Bs3RegCtxConvertToRingX(&TmpCtxExpected, iRingDst);
3488 for (iRingSrc = 3; iRingSrc >= 0; iRingSrc--)
3489 {
3490 Bs3MemCpy(&TmpCtx, &Ctx, sizeof(TmpCtx));
3491 Bs3RegCtxConvertToRingX(&TmpCtx, iRingSrc);
3492 TmpCtx.es = TmpCtxExpected.es;
3493 TmpCtxExpected.ds = iRingDst != iRingSrc ? 0 : TmpCtx.ds;
3494 for (uRplCs = 0; uRplCs <= 3; uRplCs++)
3495 {
3496 uint16_t const uSrcEs = TmpCtx.es;
3497 uint16_t const uDstCs = (TmpCtxExpected.cs & X86_SEL_MASK_OFF_RPL) | uRplCs;
3498 //Bs3TestPrintf("dst=%d src=%d rplCS=%d\n", iRingDst, iRingSrc, uRplCs);
3499
3500 /* CS.RPL */
3501 iretbuf_SetupFrame(pIretBuf, cbPop, uDstCs, TmpCtxExpected.rip.u, TmpCtxExpected.rflags.u32,
3502 TmpCtxExpected.ss, TmpCtxExpected.rsp.u);
3503 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3504 if (uRplCs == iRingDst && iRingDst >= iRingSrc)
3505 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3506 else
3507 {
3508 if (iRingDst < iRingSrc)
3509 TmpCtx.es = 0;
3510 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstCs & X86_SEL_MASK_OFF_RPL);
3511 TmpCtx.es = uSrcEs;
3512 }
3513 g_usBs3TestStep++;
3514
3515 /* SS.RPL */
3516 if (iRingDst != iRingSrc || BS3_MODE_IS_64BIT_CODE(bTestMode))
3517 {
3518 uint16_t uSavedDstSs = TmpCtxExpected.ss;
3519 for (uRplSs = 0; uRplSs <= 3; uRplSs++)
3520 {
3521 /* SS.DPL (iRingDst == CS.DPL) */
3522 for (uDplSs = 0; uDplSs <= 3; uDplSs++)
3523 {
3524 uint16_t const uDstSs = ((uDplSs << BS3_SEL_RING_SHIFT) | uRplSs) + uDstSsR0;
3525 //Bs3TestPrintf("dst=%d src=%d rplCS=%d rplSS=%d dplSS=%d dst %04x:%08RX64 %08RX32 %04x:%08RX64\n",
3526 // iRingDst, iRingSrc, uRplCs, uRplSs, uDplSs, uDstCs, TmpCtxExpected.rip.u,
3527 // TmpCtxExpected.rflags.u32, uDstSs, TmpCtxExpected.rsp.u);
3528
3529 iretbuf_SetupFrame(pIretBuf, cbPop, uDstCs, TmpCtxExpected.rip.u,
3530 TmpCtxExpected.rflags.u32, uDstSs, TmpCtxExpected.rsp.u);
3531 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3532 if (uRplCs != iRingDst || iRingDst < iRingSrc)
3533 {
3534 if (iRingDst < iRingSrc)
3535 TmpCtx.es = 0;
3536 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstCs & X86_SEL_MASK_OFF_RPL);
3537 }
3538 else if (uRplSs != iRingDst || uDplSs != iRingDst)
3539 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstSs & X86_SEL_MASK_OFF_RPL);
3540 else
3541 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3542 TmpCtx.es = uSrcEs;
3543 g_usBs3TestStep++;
3544 }
3545 }
3546
3547 TmpCtxExpected.ss = uSavedDstSs;
3548 }
3549 }
3550 }
3551 }
3552 }
3553
3554 /*
3555 * Special 64-bit checks.
3556 */
3557 if (BS3_MODE_IS_64BIT_CODE(bTestMode))
3558 {
3559 /* The VM flag is completely ignored. */
3560 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3561 CtxUdExpected.rflags.u32 | X86_EFL_VM, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3562 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3563 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3564 g_usBs3TestStep++;
3565
3566 /* The NT flag can be loaded just fine. */
3567 CtxUdExpected.rflags.u32 |= X86_EFL_NT;
3568 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3569 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3570 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3571 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3572 CtxUdExpected.rflags.u32 &= ~X86_EFL_NT;
3573 g_usBs3TestStep++;
3574
3575 /* However, we'll #GP(0) if it's already set (in RFLAGS) when executing IRET. */
3576 Ctx.rflags.u32 |= X86_EFL_NT;
3577 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3578 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3579 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3580 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3581 g_usBs3TestStep++;
3582
3583 /* The NT flag #GP(0) should trump all other exceptions - pit it against #PF. */
3584 pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED);
3585 if (pbTest != NULL)
3586 {
3587 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, &pbTest[X86_PAGE_SIZE]);
3588 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3589 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3590 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3591 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3592 g_usBs3TestStep++;
3593
3594 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, pIretBuf);
3595 Bs3MemGuardedTestPageFree(pbTest);
3596 }
3597 Ctx.rflags.u32 &= ~X86_EFL_NT;
3598 }
3599}
3600
3601
3602BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_iret)(uint8_t bMode)
3603{
3604 struct
3605 {
3606 uint8_t abExtraStack[4096]; /**< we've got ~30KB of stack, so 4KB for the trap handlers++ is not a problem. */
3607 IRETBUF IRetBuf;
3608 uint8_t abGuard[32];
3609 } uBuf;
3610 size_t cbUnused;
3611
3612 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
3613 bs3CpuBasic2_SetGlobals(bMode);
3614
3615 /*
3616 * Primary instruction form.
3617 */
3618 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3619 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3620 if (BS3_MODE_IS_16BIT_CODE(bMode))
3621 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 2, &uBuf.IRetBuf, "iret");
3622 else if (BS3_MODE_IS_32BIT_CODE(bMode))
3623 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 4, &uBuf.IRetBuf, "iretd");
3624 else
3625 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_rexw, 8, &uBuf.IRetBuf, "o64 iret");
3626
3627 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3628 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3629 - (uintptr_t)uBuf.abExtraStack;
3630 if (cbUnused < 2048)
3631 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 1);
3632
3633 /*
3634 * Secondary variation: opsize prefixed.
3635 */
3636 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3637 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3638 if (BS3_MODE_IS_16BIT_CODE(bMode) && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)
3639 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 4, &uBuf.IRetBuf, "o32 iret");
3640 else if (BS3_MODE_IS_32BIT_CODE(bMode))
3641 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 2, &uBuf.IRetBuf, "o16 iret");
3642 else if (BS3_MODE_IS_64BIT_CODE(bMode))
3643 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 4, &uBuf.IRetBuf, "iretd");
3644 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3645 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3646 - (uintptr_t)uBuf.abExtraStack;
3647 if (cbUnused < 2048)
3648 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 2);
3649
3650 /*
3651 * Third variation: 16-bit in 64-bit mode (truly unlikely)
3652 */
3653 if (BS3_MODE_IS_64BIT_CODE(bMode))
3654 {
3655 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3656 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3657 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 2, &uBuf.IRetBuf, "o16 iret");
3658 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3659 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3660 - (uintptr_t)uBuf.abExtraStack;
3661 if (cbUnused < 2048)
3662 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 3);
3663 }
3664
3665 return 0;
3666}
3667
3668
3669
3670/*********************************************************************************************************************************
3671* Non-far JMP & CALL Tests *
3672*********************************************************************************************************************************/
3673#define PROTO_ALL(a_Template) \
3674 FNBS3FAR a_Template ## _c16, \
3675 a_Template ## _c32, \
3676 a_Template ## _c64
3677PROTO_ALL(bs3CpuBasic2_jmp_jb__ud2);
3678PROTO_ALL(bs3CpuBasic2_jmp_jb_back__ud2);
3679PROTO_ALL(bs3CpuBasic2_jmp_jv__ud2);
3680PROTO_ALL(bs3CpuBasic2_jmp_jv_back__ud2);
3681PROTO_ALL(bs3CpuBasic2_jmp_ind_mem__ud2);
3682PROTO_ALL(bs3CpuBasic2_jmp_ind_xAX__ud2);
3683PROTO_ALL(bs3CpuBasic2_jmp_ind_xDI__ud2);
3684FNBS3FAR bs3CpuBasic2_jmp_ind_r9__ud2_c64;
3685PROTO_ALL(bs3CpuBasic2_call_jv__ud2);
3686PROTO_ALL(bs3CpuBasic2_call_jv_back__ud2);
3687PROTO_ALL(bs3CpuBasic2_call_ind_mem__ud2);
3688PROTO_ALL(bs3CpuBasic2_call_ind_xAX__ud2);
3689PROTO_ALL(bs3CpuBasic2_call_ind_xDI__ud2);
3690FNBS3FAR bs3CpuBasic2_call_ind_r9__ud2_c64;
3691
3692PROTO_ALL(bs3CpuBasic2_jmp_opsize_begin);
3693PROTO_ALL(bs3CpuBasic2_jmp_jb_opsize__ud2);
3694PROTO_ALL(bs3CpuBasic2_jmp_jb_opsize_back__ud2);
3695PROTO_ALL(bs3CpuBasic2_jmp_jv_opsize__ud2);
3696PROTO_ALL(bs3CpuBasic2_jmp_jv_opsize_back__ud2);
3697PROTO_ALL(bs3CpuBasic2_jmp_ind_mem_opsize__ud2);
3698FNBS3FAR bs3CpuBasic2_jmp_ind_mem_opsize__ud2__intel_c64;
3699PROTO_ALL(bs3CpuBasic2_jmp_ind_xAX_opsize__ud2);
3700PROTO_ALL(bs3CpuBasic2_call_jv_opsize__ud2);
3701PROTO_ALL(bs3CpuBasic2_call_jv_opsize_back__ud2);
3702PROTO_ALL(bs3CpuBasic2_call_ind_mem_opsize__ud2);
3703FNBS3FAR bs3CpuBasic2_call_ind_mem_opsize__ud2__intel_c64;
3704PROTO_ALL(bs3CpuBasic2_call_ind_xAX_opsize__ud2);
3705PROTO_ALL(bs3CpuBasic2_jmp_opsize_end);
3706#undef PROTO_ALL
3707
3708FNBS3FAR bs3CpuBasic2_jmptext16_start;
3709
3710FNBS3FAR bs3CpuBasic2_jmp_target_wrap_forward;
3711FNBS3FAR bs3CpuBasic2_jmp_jb_wrap_forward__ud2;
3712FNBS3FAR bs3CpuBasic2_jmp_jb_opsize_wrap_forward__ud2;
3713FNBS3FAR bs3CpuBasic2_jmp_jv16_wrap_forward__ud2;
3714FNBS3FAR bs3CpuBasic2_jmp_jv16_opsize_wrap_forward__ud2;
3715FNBS3FAR bs3CpuBasic2_call_jv16_wrap_forward__ud2;
3716FNBS3FAR bs3CpuBasic2_call_jv16_opsize_wrap_forward__ud2;
3717
3718FNBS3FAR bs3CpuBasic2_jmp_target_wrap_backward;
3719FNBS3FAR bs3CpuBasic2_jmp_jb_wrap_backward__ud2;
3720FNBS3FAR bs3CpuBasic2_jmp_jb_opsize_wrap_backward__ud2;
3721FNBS3FAR bs3CpuBasic2_jmp_jv16_wrap_backward__ud2;
3722FNBS3FAR bs3CpuBasic2_jmp_jv16_opsize_wrap_backward__ud2;
3723FNBS3FAR bs3CpuBasic2_call_jv16_wrap_backward__ud2;
3724FNBS3FAR bs3CpuBasic2_call_jv16_opsize_wrap_backward__ud2;
3725
3726
3727
3728/**
3729 * Entrypoint for non-far JMP & CALL tests.
3730 *
3731 * @returns 0 or BS3TESTDOMODE_SKIPPED.
3732 * @param bMode The CPU mode we're testing.
3733 *
3734 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
3735 * with control registers and such.
3736 */
3737BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_jmp_call)(uint8_t bMode)
3738{
3739 BS3TRAPFRAME TrapCtx;
3740 BS3REGCTX Ctx;
3741 BS3REGCTX CtxExpected;
3742 unsigned iTest;
3743
3744 /* make sure they're allocated */
3745 Bs3MemZero(&Ctx, sizeof(Ctx));
3746 Bs3MemZero(&CtxExpected, sizeof(Ctx));
3747 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
3748
3749 bs3CpuBasic2_SetGlobals(bMode);
3750
3751 /*
3752 * Create a context.
3753 */
3754 Bs3RegCtxSaveEx(&Ctx, bMode, 768);
3755 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
3756
3757 /*
3758 * 16-bit tests.
3759 *
3760      * When the opsize is 16-bit, relative jumps will do 16-bit calculations and
3761 * modify IP. This means that it is not possible to trigger a segment
3762 * limit #GP(0) when the limit is set to 0xffff.
3763 */
3764 if (BS3_MODE_IS_16BIT_CODE(bMode))
3765 {
3766 static struct
3767 {
3768 int8_t iWrap;
3769 bool fOpSizePfx;
3770 int8_t iGprIndirect;
3771 bool fCall;
3772 FPFNBS3FAR pfnTest;
3773 }
3774 const s_aTests[] =
3775 {
3776 { 0, false, -1, false, bs3CpuBasic2_jmp_jb__ud2_c16, },
3777 { 0, false, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c16, },
3778 { 0, true, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c16, },
3779 { 0, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c16, },
3780 { 0, false, -1, false, bs3CpuBasic2_jmp_jv__ud2_c16, },
3781 { 0, false, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c16, },
3782 { 0, true, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c16, },
3783 { 0, true, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c16, },
3784 { 0, false, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c16, },
3785 { 0, true, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2_c16, },
3786 { 0, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c16, },
3787 { 0, false, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c16, },
3788 { 0, true, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c16, },
3789 { 0, false, -1, true, bs3CpuBasic2_call_jv__ud2_c16, },
3790 { 0, false, -1, true, bs3CpuBasic2_call_jv_back__ud2_c16, },
3791 { 0, true, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c16, },
3792 { 0, true, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c16, },
3793 { 0, false, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c16, },
3794 { 0, true, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2_c16, },
3795 { 0, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c16, },
3796 { 0, false, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c16, },
3797 { 0, true, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c16, },
3798
3799 { -1, false, -1, false, bs3CpuBasic2_jmp_jb_wrap_backward__ud2, },
3800 { +1, false, -1, false, bs3CpuBasic2_jmp_jb_wrap_forward__ud2, },
3801 { -1, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_wrap_backward__ud2, },
3802 { +1, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_wrap_forward__ud2, },
3803
3804 { -1, false, -1, false, bs3CpuBasic2_jmp_jv16_wrap_backward__ud2, },
3805 { +1, false, -1, false, bs3CpuBasic2_jmp_jv16_wrap_forward__ud2, },
3806 { -1, true, -1, false, bs3CpuBasic2_jmp_jv16_opsize_wrap_backward__ud2, },
3807 { +1, true, -1, false, bs3CpuBasic2_jmp_jv16_opsize_wrap_forward__ud2, },
3808 { -1, false, -1, true, bs3CpuBasic2_call_jv16_wrap_backward__ud2, },
3809 { +1, false, -1, true, bs3CpuBasic2_call_jv16_wrap_forward__ud2, },
3810 { -1, true, -1, true, bs3CpuBasic2_call_jv16_opsize_wrap_backward__ud2, },
3811 { +1, true, -1, true, bs3CpuBasic2_call_jv16_opsize_wrap_forward__ud2, },
3812 };
3813
3814 if (!BS3_MODE_IS_RM_OR_V86(bMode))
3815 Bs3SelSetup16BitCode(&Bs3GdteSpare03, Bs3SelLnkPtrToFlat(bs3CpuBasic2_jmptext16_start), 0);
3816
3817 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
3818 {
3819 uint64_t uGprSaved;
3820 if (s_aTests[iTest].iWrap == 0)
3821 {
3822 uint8_t const BS3_FAR *fpbCode;
3823 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
3824 fpbCode = (uint8_t const BS3_FAR *)BS3_FP_MAKE(Ctx.cs, Ctx.rip.u16);
3825 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
3826 }
3827 else
3828 {
3829 if (BS3_MODE_IS_RM_OR_V86(bMode))
3830 Ctx.cs = BS3_FP_SEG(s_aTests[iTest].pfnTest);
3831 else
3832 Ctx.cs = BS3_SEL_SPARE_03;
3833 Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
3834 if (s_aTests[iTest].fOpSizePfx)
3835 CtxExpected.rip.u = Ctx.rip.u;
3836 else if (s_aTests[iTest].iWrap < 0)
3837 CtxExpected.rip.u = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward);
3838 else
3839 CtxExpected.rip.u = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_forward);
3840 }
3841 CtxExpected.cs = Ctx.cs;
3842 if (s_aTests[iTest].iGprIndirect >= 0)
3843 {
3844 uGprSaved = (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u;
3845 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u
3846 = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = CtxExpected.rip.u;
3847 }
3848 CtxExpected.rsp.u = Ctx.rsp.u;
3849 if (s_aTests[iTest].fCall && (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx))
3850 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 4 : 2;
3851 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u);
3852
3853 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3854 if (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx)
3855 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
3856 else
3857 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3858 g_usBs3TestStep++;
3859
3860 /* Again single stepping: */
3861 //Bs3TestPrintf("stepping...\n");
3862 Bs3RegSetDr6(0);
3863 Ctx.rflags.u16 |= X86_EFL_TF;
3864 CtxExpected.rflags.u16 = Ctx.rflags.u16;
3865 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3866 if (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx)
3867 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
3868 else
3869 {
3870 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3871 bs3CpuBasic2_CheckDr6InitVal();
3872 }
3873 Ctx.rflags.u16 &= ~X86_EFL_TF;
3874 CtxExpected.rflags.u16 = Ctx.rflags.u16;
3875 g_usBs3TestStep++;
3876
3877 if (s_aTests[iTest].iGprIndirect >= 0)
3878 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = uGprSaved;
3879 }
3880
3881 /* Limit the wraparound CS segment to exclude bs3CpuBasic2_jmp_target_wrap_backward
3882 and run the backward wrapping tests. */
3883 if (!BS3_MODE_IS_RM_OR_V86(bMode))
3884 {
3885 Bs3GdteSpare03.Gen.u16LimitLow = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward) - 1;
3886 CtxExpected.cs = Ctx.cs = BS3_SEL_SPARE_03;
3887 CtxExpected.rsp.u = Ctx.rsp.u;
3888 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
3889 if (s_aTests[iTest].iWrap < 0)
3890 {
3891 CtxExpected.rip.u = Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
3892 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 v1\n", Ctx.cs, Ctx.rip.u);
3893 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3894 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3895 g_usBs3TestStep++;
3896 }
3897
3898 /* Do another round where we put the limit in the middle of the UD2
3899 instruction we're jumping to: */
3900 Bs3GdteSpare03.Gen.u16LimitLow = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward);
3901 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
3902 if (s_aTests[iTest].iWrap < 0)
3903 {
3904 Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
3905 if (s_aTests[iTest].fOpSizePfx)
3906 CtxExpected.rip.u = Ctx.rip.u;
3907 else
3908 CtxExpected.rip.u = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward);
3909 CtxExpected.rsp.u = Ctx.rsp.u;
3910 if (s_aTests[iTest].fCall && (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx))
3911 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 4 : 2;
3912 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 v2\n", Ctx.cs, Ctx.rip.u);
3913 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3914 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3915 g_usBs3TestStep++;
3916 }
3917 }
3918
3919 }
3920 /*
3921 * 32-bit & 64-bit tests.
3922 *
3923 * When the opsize prefix is applied here, IP is updated and bits 63:16
3924      * cleared. However, in 64-bit mode Intel ignores the opsize prefix
3925      * whereas AMD honors it, so IP gets truncated as you'd expect.
3926 */
3927 else
3928 {
3929 static struct
3930 {
3931 uint8_t cBits;
3932 bool fOpSizePfx;
3933 bool fIgnPfx;
3934 int8_t iGprIndirect;
3935 bool fCall;
3936 FPFNBS3FAR pfnTest;
3937 }
3938 const s_aTests[] =
3939 {
3940 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jb__ud2_c32, },
3941 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c32, },
3942 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c32, },
3943 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c32, },
3944 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jv__ud2_c32, },
3945 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c32, },
3946 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c32, },
3947 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c32, },
3948 { 32, false, false, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c32, },
3949 { 32, true, false, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2_c32, },
3950 { 32, false, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c32, },
3951 { 32, false, false, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c32, },
3952 { 32, true, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c32, },
3953 { 32, false, false, -1, true, bs3CpuBasic2_call_jv__ud2_c32, },
3954 { 32, false, false, -1, true, bs3CpuBasic2_call_jv_back__ud2_c32, },
3955 { 32, true, false, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c32, },
3956 { 32, true, false, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c32, },
3957 { 32, false, false, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c32, },
3958 { 32, true, false, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2_c32, },
3959 { 32, false, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c32, },
3960 { 32, false, false, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c32, },
3961 { 32, true, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c32, },
3962 /* 64bit/Intel: Use the _c64 tests, which are written to ignore the o16 prefix. */
3963 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jb__ud2_c64, },
3964 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c64, },
3965 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c64, },
3966 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c64, },
3967 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jv__ud2_c64, },
3968 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c64, },
3969 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c64, },
3970 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c64, },
3971 { 64, false, true, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c64, },
3972 { 64, true, true, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2__intel_c64, },
3973 { 64, false, true, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c64, },
3974 { 64, false, true, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c64, },
3975 { 64, false, true, X86_GREG_x9, false, bs3CpuBasic2_jmp_ind_r9__ud2_c64, },
3976 { 64, true, true, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c64, }, /* no intel version needed */
3977 { 64, false, true, -1, true, bs3CpuBasic2_call_jv__ud2_c64, },
3978 { 64, false, true, -1, true, bs3CpuBasic2_call_jv_back__ud2_c64, },
3979 { 64, true, true, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c64, },
3980 { 64, true, true, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c64, },
3981 { 64, false, true, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c64, },
3982 { 64, true, true, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2__intel_c64,},
3983 { 64, false, true, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c64, },
3984 { 64, false, true, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c64, },
3985 { 64, false, true, X86_GREG_x9, true, bs3CpuBasic2_call_ind_r9__ud2_c64, },
3986 { 64, true, true, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c64, }, /* no intel version needed */
3987 /* 64bit/AMD: Use the _c32 tests. */
3988 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jb__ud2_c32, },
3989 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c32, },
3990 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c32, },
3991 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c32, },
3992 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jv__ud2_c32, },
3993 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c32, },
3994 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c32, },
3995 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c32, },
3996 { 64, false, false, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c64, }, /* using c64 here */
3997 { 64, true, false, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2_c64, }, /* ditto */
3998 { 64, false, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c64, }, /* ditto */
3999 { 64, false, false, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c64, }, /* ditto */
4000 { 64, false, false, X86_GREG_x9, false, bs3CpuBasic2_jmp_ind_r9__ud2_c64, }, /* ditto */
4001 { 64, true, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c64, }, /* ditto */
4002 { 64, false, false, -1, true, bs3CpuBasic2_call_jv__ud2_c32, }, /* using c32 again */
4003 { 64, false, false, -1, true, bs3CpuBasic2_call_jv_back__ud2_c32, },
4004 { 64, true, false, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c32, },
4005 { 64, true, false, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c32, },
4006 { 64, false, false, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c64, }, /* using c64 here */
4007 { 64, true, false, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2_c64, }, /* ditto */
4008 { 64, false, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c64, }, /* ditto */
4009 { 64, false, false, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c64, }, /* ditto */
4010 { 64, false, false, X86_GREG_x9, true, bs3CpuBasic2_call_ind_r9__ud2_c64, }, /* ditto */
4011 { 64, true, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c64, }, /* ditto */
4012 };
4013 uint8_t const cBits = BS3_MODE_IS_64BIT_CODE(bMode) ? 64 : 32;
4014 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
4015 bool const fIgnPfx = cBits == 64 && enmCpuVendor == BS3CPUVENDOR_INTEL; /** @todo what does VIA do? */
4016
4017 /* Prepare a copy of the UD2 instructions in low memory for opsize prefixed tests. */
4018 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_jmp_opsize_begin_c32);
4019 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_jmp_opsize_end_c64) - offLow;
4020 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4021 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4022 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4023             Bs3TestFailedF("Opsize overridden jumps are out of place: %#x LB %#x\n", offLow, cbLow);
4024 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4025 if (!fIgnPfx)
4026 {
4027 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4028 if (s_aTests[iTest].fOpSizePfx && s_aTests[iTest].cBits == cBits && s_aTests[iTest].fIgnPfx == fIgnPfx)
4029 {
4030 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4031 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4032 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4033 pbCode16[offUd] = 0xf1; /* replace original ud2 with icebp */
4034 pbCode16[offUd + 1] = 0xf1;
4035 pbLow[offUd] = 0x0f; /* plant ud2 in low memory */
4036 pbLow[offUd + 1] = 0x0b;
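                    /* The real code now ends with int1 (icebp) while the ud2 copy lives at
                       the same offset in the low 64KB, so only a correctly truncated 16-bit
                       IP will land on a ud2 and produce the expected #UD. */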
4037 }
4038 }
4039
4040 /* Run the tests. */
4041 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4042 {
4043 if (s_aTests[iTest].cBits == cBits && s_aTests[iTest].fIgnPfx == fIgnPfx)
4044 {
4045 uint64_t uGprSaved;
4046 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4047 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4048 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4049 if (s_aTests[iTest].iGprIndirect >= 0)
4050 {
4051 uGprSaved = (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u;
4052 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u
4053 = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = CtxExpected.rip.u;
4054 }
4055 if (s_aTests[iTest].fOpSizePfx && !fIgnPfx)
4056 CtxExpected.rip.u &= UINT16_MAX;
4057 CtxExpected.rsp.u = Ctx.rsp.u;
4058 if (s_aTests[iTest].fCall)
4059 CtxExpected.rsp.u -= s_aTests[iTest].cBits == 64 ? 8
4060 : !s_aTests[iTest].fOpSizePfx ? 4 : 2;
4061
4062 //Bs3TestPrintf("cs:rip=%04RX16:%08RX64\n", Ctx.cs, Ctx.rip.u);
4063
4064 if (BS3_MODE_IS_16BIT_SYS(bMode))
4065 g_uBs3TrapEipHint = s_aTests[iTest].fOpSizePfx ? 0 : Ctx.rip.u32;
4066 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4067
4068 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4069 g_usBs3TestStep++;
4070
4071 /* Again single stepping: */
4072 //Bs3TestPrintf("stepping...\n");
4073 Bs3RegSetDr6(0);
4074 Ctx.rflags.u16 |= X86_EFL_TF;
4075 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4076 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4077 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4078 Ctx.rflags.u16 &= ~X86_EFL_TF;
4079 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4080 g_usBs3TestStep++;
4081
4082 if (s_aTests[iTest].iGprIndirect >= 0)
4083 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = uGprSaved;
4084 }
4085 }
4086
4087 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4088 }
4089
4090 return 0;
4091}
4092
4093
4094/*********************************************************************************************************************************
4095* FAR JMP & FAR CALL Tests *
4096*********************************************************************************************************************************/
4097#define PROTO_ALL(a_Template) \
4098 FNBS3FAR a_Template ## _c16, \
4099 a_Template ## _c32, \
4100 a_Template ## _c64
4101PROTO_ALL(bs3CpuBasic2_far_jmp_call_opsize_begin);
4102
4103FNBS3FAR bs3CpuBasic2_jmpf_ptr_rm__ud2_c16;
4104PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r0__ud2);
4105PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r1__ud2);
4106PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r2__ud2);
4107PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r3__ud2);
4108PROTO_ALL(bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2);
4109PROTO_ALL(bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2);
4110PROTO_ALL(bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2);
4111
4112FNBS3FAR bs3CpuBasic2_callf_ptr_rm__ud2_c16;
4113PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r0__ud2);
4114PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r1__ud2);
4115PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r2__ud2);
4116PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r3__ud2);
4117PROTO_ALL(bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2);
4118PROTO_ALL(bs3CpuBasic2_callf_ptr_r0_cs64__ud2);
4119PROTO_ALL(bs3CpuBasic2_callf_ptr_r0_cs16l__ud2);
4120
4121FNBS3FAR bs3CpuBasic2_jmpf_mem_rm__ud2_c16;
4122PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r0__ud2);
4123PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r1__ud2);
4124PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r2__ud2);
4125PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r3__ud2);
4126PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs16__ud2);
4127PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs32__ud2);
4128PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs64__ud2);
4129PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2);
4130
4131FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r0__ud2_intel_c64;
4132FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r1__ud2_intel_c64;
4133FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r2__ud2_intel_c64;
4134FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r3__ud2_intel_c64;
4135FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_intel_c64;
4136FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_intel_c64;
4137FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_intel_c64;
4138FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_intel_c64;
4139
4140FNBS3FAR bs3CpuBasic2_callf_mem_rm__ud2_c16;
4141PROTO_ALL(bs3CpuBasic2_callf_mem_same_r0__ud2);
4142PROTO_ALL(bs3CpuBasic2_callf_mem_same_r1__ud2);
4143PROTO_ALL(bs3CpuBasic2_callf_mem_same_r2__ud2);
4144PROTO_ALL(bs3CpuBasic2_callf_mem_same_r3__ud2);
4145PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs16__ud2);
4146PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs32__ud2);
4147PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs64__ud2);
4148PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs16l__ud2);
4149
4150FNBS3FAR bs3CpuBasic2_callf_mem_same_r0__ud2_intel_c64;
4151FNBS3FAR bs3CpuBasic2_callf_mem_same_r1__ud2_intel_c64;
4152FNBS3FAR bs3CpuBasic2_callf_mem_same_r2__ud2_intel_c64;
4153FNBS3FAR bs3CpuBasic2_callf_mem_same_r3__ud2_intel_c64;
4154FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs16__ud2_intel_c64;
4155FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs32__ud2_intel_c64;
4156FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs64__ud2_intel_c64;
4157FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs16l__ud2_intel_c64;
4158
4159PROTO_ALL(bs3CpuBasic2_far_jmp_call_opsize_end);
4160#undef PROTO_ALL
4161
4162
4163
4164/**
4165 * Entrypoint for FAR JMP & FAR CALL tests.
4166 *
4167 * @returns 0 or BS3TESTDOMODE_SKIPPED.
4168 * @param bMode The CPU mode we're testing.
4169 *
4170 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
4171 * with control registers and such.
4172 */
4173BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_far_jmp_call)(uint8_t bMode)
4174{
4175 BS3TRAPFRAME TrapCtx;
4176 BS3REGCTX Ctx;
4177 BS3REGCTX CtxExpected;
4178 unsigned iTest;
4179
4180 /* make sure they're allocated */
4181 Bs3MemZero(&Ctx, sizeof(Ctx));
4182 Bs3MemZero(&CtxExpected, sizeof(CtxExpected));
4183 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
4184
4185 bs3CpuBasic2_SetGlobals(bMode);
4186
4187 /*
4188 * Create a context.
4189 */
4190 Bs3RegCtxSaveEx(&Ctx, bMode, 768);
4191 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
4192
4193 if (Ctx.rax.u8 == 0 || Ctx.rax.u8 == 0xff) /* for salc & the 64-bit detection */
4194 CtxExpected.rax.u8 = Ctx.rax.u8 = 0x42;
4195
4196 /*
4197 * Set up spare selectors.
4198 */
4199 Bs3GdteSpare00 = Bs3Gdte_CODE16;
4200 Bs3GdteSpare00.Gen.u1Long = 1;
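/* Bs3GdteSpare00 is a copy of the 16-bit code descriptor with the L bit set:
   in long mode it acts as a 64-bit CS with base 0, while outside long mode it
   remains a 16-bit CS with base 0x10000 - hence the "16-bit CS, except in LM"
   remarks in the tables below. */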
4201
4202 /*
4203 * 16-bit tests.
4204 */
4205 if (BS3_MODE_IS_16BIT_CODE(bMode))
4206 {
4207 static struct
4208 {
4209 bool fRmOrV86;
4210 bool fCall;
4211 uint16_t uDstSel;
4212 uint8_t uDstBits;
4213 bool fOpSizePfx;
4214 FPFNBS3FAR pfnTest;
4215 }
4216 const s_aTests[] =
4217 {
4218 { true, false, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_jmpf_ptr_rm__ud2_c16, },
4219 { false, false, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_jmpf_ptr_same_r0__ud2_c16, },
4220 { false, false, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_jmpf_ptr_same_r1__ud2_c16, },
4221 { false, false, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_jmpf_ptr_same_r2__ud2_c16, },
4222 { false, false, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_jmpf_ptr_same_r3__ud2_c16, },
4223 { false, false, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2_c16, },
4224 { false, false, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4225 { false, false, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4226
4227 { true, true, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_callf_ptr_rm__ud2_c16, },
4228 { false, true, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_callf_ptr_same_r0__ud2_c16, },
4229 { false, true, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_callf_ptr_same_r1__ud2_c16, },
4230 { false, true, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_callf_ptr_same_r2__ud2_c16, },
4231 { false, true, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_callf_ptr_same_r3__ud2_c16, },
4232 { false, true, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2_c16, },
4233 { false, true, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_callf_ptr_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4234 { false, true, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_callf_ptr_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4235
4236 { true, false, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_jmpf_mem_rm__ud2_c16, },
4237 { false, false, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_jmpf_mem_same_r0__ud2_c16, },
4238 { false, false, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_jmpf_mem_same_r1__ud2_c16, },
4239 { false, false, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_jmpf_mem_same_r2__ud2_c16, },
4240 { false, false, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_jmpf_mem_same_r3__ud2_c16, },
4241 { false, false, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_c16, },
4242 { false, false, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_c16, },
4243 { false, false, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4244 { false, false, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4245
4246 { true, true, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_callf_mem_rm__ud2_c16, },
4247 { false, true, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_callf_mem_same_r0__ud2_c16, },
4248 { false, true, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_callf_mem_same_r1__ud2_c16, },
4249 { false, true, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_callf_mem_same_r2__ud2_c16, },
4250 { false, true, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_callf_mem_same_r3__ud2_c16, },
4251 { false, true, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_callf_mem_r0_cs16__ud2_c16, },
4252 { false, true, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_callf_mem_r0_cs32__ud2_c16, },
4253 { false, true, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_callf_mem_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4254 { false, true, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4255 };
4256 bool const fRmOrV86 = BS3_MODE_IS_RM_OR_V86(bMode);
4257
4258 /* Prepare a copy of the SALC & UD2 instructions in low memory for opsize
4259 prefixed tests jumping to BS3_SEL_SPARE_00 when in 64-bit mode, because
4260 it'll be a 64-bit CS then with base=0 instead of a CS16 with base=0x10000. */
4261 if (BS3_MODE_IS_64BIT_SYS(bMode))
4262 {
4263 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_begin_c16);
4264 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_end_c16) - offLow;
4265 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4266 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4267 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4268 Bs3TestFailedF("Opsize overridden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
4269 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4270 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4271 if (s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00 && s_aTests[iTest].uDstBits == 64)
4272 {
4273 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4274 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4275 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4276 pbLow[offUd - 1] = 0xd6; /* plant salc + ud2 in low memory */
4277 pbLow[offUd] = 0x0f;
4278 pbLow[offUd + 1] = 0x0b;
4279 }
4280 }
4281
4282 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4283 if (s_aTests[iTest].fRmOrV86 == fRmOrV86)
4284 {
4285 uint64_t const uSavedRsp = Ctx.rsp.u;
4286 bool const fGp = (s_aTests[iTest].uDstSel & X86_SEL_RPL) != 0;
4287 uint8_t const BS3_FAR *fpbCode;
4288
4289 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
4290 fpbCode = (uint8_t const BS3_FAR *)BS3_FP_MAKE(Ctx.cs, Ctx.rip.u16);
4291 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4292 if ( s_aTests[iTest].uDstBits == 32
4293 || ( s_aTests[iTest].uDstBits == 64
4294 && !BS3_MODE_IS_16BIT_SYS(bMode)
4295 && s_aTests[iTest].uDstSel != BS3_SEL_SPARE_00))
4296 CtxExpected.rip.u += BS3_ADDR_BS3TEXT16;
4297 if (s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00 && s_aTests[iTest].uDstBits == 64 && BS3_MODE_IS_64BIT_SYS(bMode))
4298 CtxExpected.rip.u &= UINT16_MAX;
4299 CtxExpected.cs = s_aTests[iTest].uDstSel;
4300 if (fGp)
4301 {
4302 CtxExpected.rip.u = Ctx.rip.u;
4303 CtxExpected.cs = Ctx.cs;
4304 }
4305 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4306 CtxExpected.rsp.u = Ctx.rsp.u;
4307 if (s_aTests[iTest].fCall && !fGp)
4308 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 8 : 4;
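/* 64-bit destinations end in a salc+ud2 pair: salc (0xd6) is an invalid opcode
   in 64-bit mode, so there the #UD fires one byte early; in the other modes the
   CS is really 16-bit, salc executes and loads AL from CF before the ud2 traps
   (hence the check above that AL isn't already 0 or 0xff). */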
4309 if (s_aTests[iTest].uDstBits == 64 && !fGp)
4310 {
4311 if (BS3_MODE_IS_64BIT_SYS(bMode))
4312 CtxExpected.rip.u -= 1;
4313 else
4314 CtxExpected.rax.u8 = CtxExpected.rflags.u & X86_EFL_CF ? 0xff : 0x00;
4315 }
4316 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4317 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4318 if (!fGp)
4319 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4320 else
4321 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4322 Ctx.rsp.u = uSavedRsp;
4323 g_usBs3TestStep++;
4324
4325 /* Again single stepping: */
4326 //Bs3TestPrintf("stepping...\n");
4327 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4328 Ctx.rflags.u16 |= X86_EFL_TF;
4329 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4330 CtxExpected.rax.u = Ctx.rax.u;
4331 if (s_aTests[iTest].uDstBits == 64 && !fGp && !BS3_MODE_IS_64BIT_SYS(bMode))
4332 CtxExpected.rip.u -= 1;
4333 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4334 if (!fGp)
4335 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4336 else
4337 {
4338 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4339 bs3CpuBasic2_CheckDr6InitVal();
4340 }
4341 Ctx.rflags.u16 &= ~X86_EFL_TF;
4342 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4343 Ctx.rsp.u = uSavedRsp;
4344 g_usBs3TestStep++;
4345 }
4346 }
4347 /*
4348 * 32-bit tests.
4349 */
4350 else if (BS3_MODE_IS_32BIT_CODE(bMode))
4351 {
4352 static struct
4353 {
4354 bool fCall;
4355 uint16_t uDstSel;
4356 uint8_t uDstBits;
4357 bool fOpSizePfx;
4358 FPFNBS3FAR pfnTest;
4359 }
4360 const s_aTests[] =
4361 {
4362 { false, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_jmpf_ptr_same_r0__ud2_c32, },
4363 { false, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_jmpf_ptr_same_r1__ud2_c32, },
4364 { false, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_jmpf_ptr_same_r2__ud2_c32, },
4365 { false, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_jmpf_ptr_same_r3__ud2_c32, },
4366 { false, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2_c32, },
4367 { false, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4368 { false, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4369
4370 { true, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_callf_ptr_same_r0__ud2_c32, },
4371 { true, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_callf_ptr_same_r1__ud2_c32, },
4372 { true, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_callf_ptr_same_r2__ud2_c32, },
4373 { true, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_callf_ptr_same_r3__ud2_c32, },
4374 { true, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2_c32, },
4375 { true, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_callf_ptr_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4376 { true, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_callf_ptr_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4377
4378 { false, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_jmpf_mem_same_r0__ud2_c32, },
4379 { false, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_jmpf_mem_same_r1__ud2_c32, },
4380 { false, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_jmpf_mem_same_r2__ud2_c32, },
4381 { false, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_jmpf_mem_same_r3__ud2_c32, },
4382 { false, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_c32, },
4383 { false, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_c32, },
4384 { false, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4385 { false, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4386
4387 { true, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_callf_mem_same_r0__ud2_c32, },
4388 { true, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_callf_mem_same_r1__ud2_c32, },
4389 { true, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_callf_mem_same_r2__ud2_c32, },
4390 { true, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_callf_mem_same_r3__ud2_c32, },
4391 { true, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_callf_mem_r0_cs16__ud2_c32, },
4392 { true, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_callf_mem_r0_cs32__ud2_c32, },
4393 { true, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_callf_mem_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4394 { true, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4395 };
4396
4397 /* Prepare a copy of the SALC & UD2 instructions in low memory for opsize
4398 prefixed tests jumping to BS3_SEL_SPARE_00 when in 64-bit mode, because
4399 it'll be a 64-bit CS then with base=0 instead of a CS16 with base=0x10000. */
4400 if (BS3_MODE_IS_64BIT_SYS(bMode))
4401 {
4402 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_begin_c32);
4403 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_end_c32) - offLow;
4404 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4405 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4406 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4407 Bs3TestFailedF("Opsize overridden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
4408 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4409 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4410 if (s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00 && s_aTests[iTest].uDstBits == 64)
4411 {
4412 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4413 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4414 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4415 pbLow[offUd - 1] = 0xd6; /* plant salc + ud2 in low memory */
4416 pbLow[offUd] = 0x0f;
4417 pbLow[offUd + 1] = 0x0b;
4418 }
4419 }
4420 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4421 {
4422 uint64_t const uSavedRsp = Ctx.rsp.u;
4423 bool const fGp = (s_aTests[iTest].uDstSel & X86_SEL_RPL) != 0;
4424 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4425
4426 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4427 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4428 if ( s_aTests[iTest].uDstBits == 16
4429 || ( s_aTests[iTest].uDstBits == 64
4430 && ( BS3_MODE_IS_16BIT_SYS(bMode))
4431 || s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00))
4432 CtxExpected.rip.u &= UINT16_MAX;
4433 CtxExpected.cs = s_aTests[iTest].uDstSel;
4434 if (fGp)
4435 {
4436 CtxExpected.rip.u = Ctx.rip.u;
4437 CtxExpected.cs = Ctx.cs;
4438 }
4439 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4440 CtxExpected.rsp.u = Ctx.rsp.u;
4441 if (s_aTests[iTest].fCall && !fGp)
4442 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 4 : 8;
4443 if (s_aTests[iTest].uDstBits == 64 && !fGp)
4444 {
4445 if (BS3_MODE_IS_64BIT_SYS(bMode))
4446 CtxExpected.rip.u -= 1;
4447 else
4448 CtxExpected.rax.u8 = CtxExpected.rflags.u & X86_EFL_CF ? 0xff : 0x00;
4449 }
4450 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4451 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4452 if (!fGp)
4453 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4454 else
4455 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4456 Ctx.rsp.u = uSavedRsp;
4457 g_usBs3TestStep++;
4458
4459 /* Again single stepping: */
4460 //Bs3TestPrintf("stepping...\n");
4461 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4462 Ctx.rflags.u16 |= X86_EFL_TF;
4463 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4464 CtxExpected.rax.u = Ctx.rax.u;
4465 if (s_aTests[iTest].uDstBits == 64 && !fGp && !BS3_MODE_IS_64BIT_SYS(bMode))
4466 CtxExpected.rip.u -= 1;
4467 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4468 if (!fGp)
4469 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4470 else
4471 {
4472 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4473 bs3CpuBasic2_CheckDr6InitVal();
4474 }
4475 Ctx.rflags.u16 &= ~X86_EFL_TF;
4476 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4477 Ctx.rsp.u = uSavedRsp;
4478 g_usBs3TestStep++;
4479 }
4480 }
4481 /*
4482 * 64-bit tests.
4483 */
4484 else if (BS3_MODE_IS_64BIT_CODE(bMode))
4485 {
4486 static struct
4487 {
4488 bool fInvalid;
4489 bool fCall;
4490 uint16_t uDstSel;
4491 uint8_t uDstBits;
4492 uint8_t fOpSizePfx; /**< 0: none, 1: 066h, 2: REX.W, 3: 066h REX.W */
4493 int8_t fFix64OpSize;
4494 FPFNBS3FAR pfnTest;
4495 }
4496 const s_aTests[] =
4497 {
4498 /* invalid opcodes: */
4499 { true, false, BS3_SEL_R0_CS32, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r0__ud2_c32, },
4500 { true, false, BS3_SEL_R1_CS32 | 1, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r1__ud2_c32, },
4501 { true, false, BS3_SEL_R2_CS32 | 2, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r2__ud2_c32, },
4502 { true, false, BS3_SEL_R3_CS32 | 3, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r3__ud2_c32, },
4503 { true, false, BS3_SEL_R0_CS16, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2_c32, },
4504 { true, false, BS3_SEL_R0_CS64, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2_c32, },
4505 { true, false, BS3_SEL_SPARE_00, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2_c32, },
4506
4507 { true, true, BS3_SEL_R0_CS32, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r0__ud2_c32, },
4508 { true, true, BS3_SEL_R1_CS32 | 1, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r1__ud2_c32, },
4509 { true, true, BS3_SEL_R2_CS32 | 2, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r2__ud2_c32, },
4510 { true, true, BS3_SEL_R3_CS32 | 3, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r3__ud2_c32, },
4511 { true, true, BS3_SEL_R0_CS16, 64, 0, -1, bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2_c32, },
4512 { true, true, BS3_SEL_R0_CS64, 64, 0, -1, bs3CpuBasic2_callf_ptr_r0_cs64__ud2_c32, },
4513 { true, true, BS3_SEL_SPARE_00, 64, 0, -1, bs3CpuBasic2_callf_ptr_r0_cs16l__ud2_c32, },
4514
4515 { false, false, BS3_SEL_R0_CS64, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r0__ud2_c64, },
4516 { false, false, BS3_SEL_R1_CS64 | 1, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r1__ud2_c64, },
4517 { false, false, BS3_SEL_R2_CS64 | 2, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r2__ud2_c64, },
4518 { false, false, BS3_SEL_R3_CS64 | 3, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r3__ud2_c64, },
4519 { false, false, BS3_SEL_R0_CS16, 16, 1, false, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_c64, },
4520 { false, false, BS3_SEL_R0_CS32, 32, 0, false, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_c64, },
4521 { false, false, BS3_SEL_R0_CS64, 64, 0, false, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_c64, }, /* 16-bit CS, except in LM. */
4522 { false, false, BS3_SEL_SPARE_00, 64, 0, false, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_c64, }, /* 16-bit CS, except in LM. */
4523
4524 { false, false, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_jmpf_mem_same_r0__ud2_intel_c64, },
4525 { false, false, BS3_SEL_R1_CS64 | 1, 64, 2, true, bs3CpuBasic2_jmpf_mem_same_r1__ud2_intel_c64, },
4526 { false, false, BS3_SEL_R2_CS64 | 2, 64, 0, true, bs3CpuBasic2_jmpf_mem_same_r2__ud2_intel_c64, },
4527 { false, false, BS3_SEL_R3_CS64 | 3, 64, 2, true, bs3CpuBasic2_jmpf_mem_same_r3__ud2_intel_c64, },
4528 { false, false, BS3_SEL_R0_CS16, 16, 1, true, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_intel_c64, },
4529 { false, false, BS3_SEL_R0_CS32, 32, 0, true, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_intel_c64, },
4530 { false, false, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4531 { false, false, BS3_SEL_SPARE_00, 64, 0, true, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4532
4533 { false, true, BS3_SEL_R0_CS64, 64, 2, false, bs3CpuBasic2_callf_mem_same_r0__ud2_c64, },
4534 { false, true, BS3_SEL_R1_CS64 | 1, 64, 2, false, bs3CpuBasic2_callf_mem_same_r1__ud2_c64, },
4535 { false, true, BS3_SEL_R2_CS64 | 2, 64, 0, false, bs3CpuBasic2_callf_mem_same_r2__ud2_c64, },
4536 { false, true, BS3_SEL_R3_CS64 | 3, 64, 2, false, bs3CpuBasic2_callf_mem_same_r3__ud2_c64, },
4537 { false, true, BS3_SEL_R0_CS16, 16, 1, false, bs3CpuBasic2_callf_mem_r0_cs16__ud2_c64, },
4538 { false, true, BS3_SEL_R0_CS32, 32, 2, false, bs3CpuBasic2_callf_mem_r0_cs32__ud2_c64, },
4539 { false, true, BS3_SEL_R0_CS64, 64, 0, false, bs3CpuBasic2_callf_mem_r0_cs64__ud2_c64, }, /* 16-bit CS, except in LM. */
4540 { false, true, BS3_SEL_SPARE_00, 64, 0, false, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_c64, }, /* 16-bit CS, except in LM. */
4541
4542 { false, true, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_callf_mem_same_r0__ud2_intel_c64, },
4543 { false, true, BS3_SEL_R1_CS64 | 1, 64, 2, true, bs3CpuBasic2_callf_mem_same_r1__ud2_intel_c64, },
4544 { false, true, BS3_SEL_R2_CS64 | 2, 64, 0, true, bs3CpuBasic2_callf_mem_same_r2__ud2_intel_c64, },
4545 { false, true, BS3_SEL_R3_CS64 | 3, 64, 2, true, bs3CpuBasic2_callf_mem_same_r3__ud2_intel_c64, },
4546 { false, true, BS3_SEL_R0_CS16, 16, 1, true, bs3CpuBasic2_callf_mem_r0_cs16__ud2_intel_c64, },
4547 { false, true, BS3_SEL_R0_CS32, 32, 0, true, bs3CpuBasic2_callf_mem_r0_cs32__ud2_intel_c64, },
4548 { false, true, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_callf_mem_r0_cs64__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4549 { false, true, BS3_SEL_SPARE_00, 64, 0, true, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4550 };
4551 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
4552 bool const fFix64OpSize = enmCpuVendor == BS3CPUVENDOR_INTEL; /** @todo what does VIA do? */
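/* Intel and AMD differ in how the operand size of far jmp/call through memory is
   handled in 64-bit mode, hence the separate _intel_ test variants gated on
   fFix64OpSize below; what VIA does is still an open question per the @todo. */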
4553
4554 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4555 {
4556 uint64_t const uSavedRsp = Ctx.rsp.u;
4557 bool const fUd = s_aTests[iTest].fInvalid;
4558 bool const fGp = (s_aTests[iTest].uDstSel & X86_SEL_RPL) != 0;
4559 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4560
4561 if (s_aTests[iTest].fFix64OpSize != fFix64OpSize && s_aTests[iTest].fFix64OpSize >= 0)
4562 continue;
4563
4564 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4565 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4566 CtxExpected.cs = s_aTests[iTest].uDstSel;
4567 if (s_aTests[iTest].uDstBits == 16)
4568 CtxExpected.rip.u &= UINT16_MAX;
4569 else if (s_aTests[iTest].uDstBits == 64 && fFix64OpSize && s_aTests[iTest].uDstSel != BS3_SEL_SPARE_00)
4570 CtxExpected.rip.u |= UINT64_C(0xfffff00000000000);
4571
4572 if (fGp || fUd)
4573 {
4574 CtxExpected.rip.u = Ctx.rip.u;
4575 CtxExpected.cs = Ctx.cs;
4576 }
4577 CtxExpected.rsp.u = Ctx.rsp.u;
4578 if (s_aTests[iTest].fCall && !fGp && !fUd)
4579 {
4580 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx == 0 ? 8
4581 : s_aTests[iTest].fOpSizePfx == 1 ? 4 : 16;
4582 //Bs3TestPrintf("cs:rsp=%04RX16:%04RX64 -> %04RX64 (fOpSizePfx=%d)\n", Ctx.ss, Ctx.rsp.u, CtxExpected.rsp.u, s_aTests[iTest].fOpSizePfx);
4583 }
4584 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4585 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4586 if (!fGp || fUd)
4587 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4588 else
4589 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4590 Ctx.rsp.u = uSavedRsp;
4591 g_usBs3TestStep++;
4592
4593 /* Again single stepping: */
4594 //Bs3TestPrintf("stepping...\n");
4595 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4596 Ctx.rflags.u16 |= X86_EFL_TF;
4597 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4598 CtxExpected.rax.u = Ctx.rax.u;
4599 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4600 if (fUd)
4601 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4602 else if (!fGp)
4603 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4604 else
4605 {
4606 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4607 bs3CpuBasic2_CheckDr6InitVal();
4608 }
4609 Ctx.rflags.u16 &= ~X86_EFL_TF;
4610 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4611 Ctx.rsp.u = uSavedRsp;
4612 g_usBs3TestStep++;
4613 }
4614 }
4615 else
4616 Bs3TestFailed("wtf?");
4617
4618 return 0;
4619}
4620
4621
4622/*********************************************************************************************************************************
4623* Near RET *
4624*********************************************************************************************************************************/
4625#define PROTO_ALL(a_Template) \
4626 FNBS3FAR a_Template ## _c16, \
4627 a_Template ## _c32, \
4628 a_Template ## _c64
4629PROTO_ALL(bs3CpuBasic2_retn_opsize_begin);
4630PROTO_ALL(bs3CpuBasic2_retn__ud2);
4631PROTO_ALL(bs3CpuBasic2_retn_opsize__ud2);
4632PROTO_ALL(bs3CpuBasic2_retn_i24__ud2);
4633PROTO_ALL(bs3CpuBasic2_retn_i24_opsize__ud2);
4634PROTO_ALL(bs3CpuBasic2_retn_i760__ud2);
4635PROTO_ALL(bs3CpuBasic2_retn_i0__ud2);
4636PROTO_ALL(bs3CpuBasic2_retn_i0_opsize__ud2);
4637FNBS3FAR bs3CpuBasic2_retn_rexw__ud2_c64;
4638FNBS3FAR bs3CpuBasic2_retn_i24_rexw__ud2_c64;
4639FNBS3FAR bs3CpuBasic2_retn_opsize_rexw__ud2_c64;
4640FNBS3FAR bs3CpuBasic2_retn_rexw_opsize__ud2_c64;
4641FNBS3FAR bs3CpuBasic2_retn_i24_opsize_rexw__ud2_c64;
4642FNBS3FAR bs3CpuBasic2_retn_i24_rexw_opsize__ud2_c64;
4643PROTO_ALL(bs3CpuBasic2_retn_opsize_end);
4644#undef PROTO_ALL
4645
4646
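/**
 * Seeds the stack with the return address the near-RET tests expect.
 *
 * The expected RIP is written to the top of the stack as a 2, 4 or 8 byte item
 * after the neighbouring dwords have been filled with 0xffffffff, presumably so
 * a RET popping the wrong number of bytes ends up at an obviously bogus address
 * and fails the context comparison.
 */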
4647static void bs3CpuBasic2_retn_PrepStack(BS3PTRUNION StkPtr, PCBS3REGCTX pCtxExpected, uint8_t cbAddr)
4648{
4649 StkPtr.pu32[3] = UINT32_MAX;
4650 StkPtr.pu32[2] = UINT32_MAX;
4651 StkPtr.pu32[1] = UINT32_MAX;
4652 StkPtr.pu32[0] = UINT32_MAX;
4653 StkPtr.pu32[-1] = UINT32_MAX;
4654 StkPtr.pu32[-2] = UINT32_MAX;
4655 StkPtr.pu32[-3] = UINT32_MAX;
4656 StkPtr.pu32[-4] = UINT32_MAX;
4657 if (cbAddr == 2)
4658 StkPtr.pu16[0] = pCtxExpected->rip.u16;
4659 else if (cbAddr == 4)
4660 StkPtr.pu32[0] = pCtxExpected->rip.u32;
4661 else
4662 StkPtr.pu64[0] = pCtxExpected->rip.u64;
4663}
4664
4665
4666/**
4667 * Entrypoint for NEAR RET tests.
4668 *
4669 * @returns 0 or BS3TESTDOMODE_SKIPPED.
4670 * @param bMode The CPU mode we're testing.
4671 */
4672BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_near_ret)(uint8_t bMode)
4673{
4674 BS3TRAPFRAME TrapCtx;
4675 BS3REGCTX Ctx;
4676 BS3REGCTX CtxExpected;
4677 unsigned iTest;
4678 BS3PTRUNION StkPtr;
4679
4680 /* make sure they're allocated */
4681 Bs3MemZero(&Ctx, sizeof(Ctx));
4682 Bs3MemZero(&CtxExpected, sizeof(CtxExpected));
4683 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
4684
4685 bs3CpuBasic2_SetGlobals(bMode);
4686
4687 /*
4688 * Create a context.
4689 *
4690 * ASSUMES we're on the ring-0 stack in ring-0 and using less than 16KB.
4691 */
4692 Bs3RegCtxSaveEx(&Ctx, bMode, 1664);
4693 Ctx.rsp.u = BS3_ADDR_STACK - _16K;
4694 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
4695
4696 StkPtr.pv = Bs3RegCtxGetRspSsAsCurPtr(&Ctx);
4697 //Bs3TestPrintf("Stack=%p rsp=%RX64\n", StkPtr.pv, Ctx.rsp.u);
4698
4699 /*
4700 * 16-bit tests.
4701 */
4702 if (BS3_MODE_IS_16BIT_CODE(bMode))
4703 {
4704 static struct
4705 {
4706 bool fOpSizePfx;
4707 uint16_t cbImm;
4708 FPFNBS3FAR pfnTest;
4709 }
4710 const s_aTests[] =
4711 {
4712 { false, 0, bs3CpuBasic2_retn__ud2_c16, },
4713 { true, 0, bs3CpuBasic2_retn_opsize__ud2_c16, },
4714 { false, 24, bs3CpuBasic2_retn_i24__ud2_c16, },
4715 { true, 24, bs3CpuBasic2_retn_i24_opsize__ud2_c16, },
4716 { false, 0, bs3CpuBasic2_retn_i0__ud2_c16, },
4717 { true, 0, bs3CpuBasic2_retn_i0_opsize__ud2_c16, },
4718 { false,760, bs3CpuBasic2_retn_i760__ud2_c16, },
4719 };
4720
4721 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4722 {
4723 uint8_t const BS3_FAR *fpbCode;
4724
4725 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
4726 fpbCode = (uint8_t const BS3_FAR *)BS3_FP_MAKE(Ctx.cs, Ctx.rip.u16);
4727 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4728 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4729 CtxExpected.cs = Ctx.cs;
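/* A near RET pops a 2-byte return address in 16-bit code, or a 4-byte one when
   the operand size prefix is used, plus the cbImm bytes of the imm16 form. */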
4730 if (!s_aTests[iTest].fOpSizePfx)
4731 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 2;
4732 else
4733 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 4;
4734 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4735 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64\n", Ctx.ss, Ctx.rsp.u);
4736 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 4 : 2);
4737 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4738 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4739 g_usBs3TestStep++;
4740
4741 /* Again single stepping: */
4742 //Bs3TestPrintf("stepping...\n");
4743 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4744 Ctx.rflags.u16 |= X86_EFL_TF;
4745 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4746 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 4 : 2);
4747 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4748 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4749 Ctx.rflags.u16 &= ~X86_EFL_TF;
4750 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4751 g_usBs3TestStep++;
4752 }
4753 }
4754 /*
4755 * 32-bit tests.
4756 */
4757 else if (BS3_MODE_IS_32BIT_CODE(bMode))
4758 {
4759 static struct
4760 {
4761 uint8_t cBits;
4762 bool fOpSizePfx;
4763 uint16_t cbImm;
4764 FPFNBS3FAR pfnTest;
4765 }
4766 const s_aTests[] =
4767 {
4768 { 32, false, 0, bs3CpuBasic2_retn__ud2_c32, },
4769 { 32, true, 0, bs3CpuBasic2_retn_opsize__ud2_c32, },
4770 { 32, false, 24, bs3CpuBasic2_retn_i24__ud2_c32, },
4771 { 32, true, 24, bs3CpuBasic2_retn_i24_opsize__ud2_c32, },
4772 { 32, false, 0, bs3CpuBasic2_retn_i0__ud2_c32, },
4773 { 32, true, 0, bs3CpuBasic2_retn_i0_opsize__ud2_c32, },
4774 { 32, false,760, bs3CpuBasic2_retn_i760__ud2_c32, },
4775 };
4776
4777 /* Prepare a copy of the UD2 instructions in low memory for opsize prefixed tests. */
4778 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_begin_c32);
4779 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_end_c32) - offLow;
4780 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4781 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4782 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4783 Bs3TestFailedF("Opsize overridden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
4784 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4785 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4786 if (s_aTests[iTest].fOpSizePfx)
4787 {
4788 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4789 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4790 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4791 pbCode16[offUd] = 0xf1; /* replace original ud2 with icebp */
4792 pbCode16[offUd + 1] = 0xf1;
4793 pbLow[offUd] = 0x0f; /* plant ud2 in low memory */
4794 pbLow[offUd + 1] = 0x0b;
4795 }
4796
4797 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4798 {
4799 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4800
4801 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4802 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4803 CtxExpected.cs = Ctx.cs;
4804 if (!s_aTests[iTest].fOpSizePfx)
4805 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 4;
4806 else
4807 {
4808 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 2;
4809 CtxExpected.rip.u &= UINT16_MAX;
4810 }
4811 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4812 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4813 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64\n", Ctx.ss, Ctx.rsp.u);
4814 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 2 : 4);
4815 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4816 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4817 g_usBs3TestStep++;
4818
4819 /* Again single stepping: */
4820 //Bs3TestPrintf("stepping...\n");
4821 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4822 Ctx.rflags.u16 |= X86_EFL_TF;
4823 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4824 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 2 : 4);
4825 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4826 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4827 Ctx.rflags.u16 &= ~X86_EFL_TF;
4828 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4829 g_usBs3TestStep++;
4830 }
4831 }
4832 /*
4833 * 64-bit tests.
4834 */
4835 else if (BS3_MODE_IS_64BIT_CODE(bMode))
4836 {
4837 static struct
4838 {
4839 uint8_t cBits;
4840 bool fOpSizePfx;
4841 uint16_t cbImm;
4842 FPFNBS3FAR pfnTest;
4843 }
4844 const s_aTests[] =
4845 {
4846 { 32, false, 0, bs3CpuBasic2_retn__ud2_c64, },
4847 { 32, false, 0, bs3CpuBasic2_retn_rexw__ud2_c64, },
4848 { 32, true, 0, bs3CpuBasic2_retn_opsize__ud2_c64, },
4849 { 32, false, 0, bs3CpuBasic2_retn_opsize_rexw__ud2_c64, },
4850 { 32, true, 0, bs3CpuBasic2_retn_rexw_opsize__ud2_c64, },
4851 { 32, false, 24, bs3CpuBasic2_retn_i24__ud2_c64, },
4852 { 32, false, 24, bs3CpuBasic2_retn_i24_rexw__ud2_c64, },
4853 { 32, true, 24, bs3CpuBasic2_retn_i24_opsize__ud2_c64, },
4854 { 32, false, 24, bs3CpuBasic2_retn_i24_opsize_rexw__ud2_c64, },
4855 { 32, true, 24, bs3CpuBasic2_retn_i24_rexw_opsize__ud2_c64, },
4856 { 32, false, 0, bs3CpuBasic2_retn_i0__ud2_c64, },
4857 { 32, true, 0, bs3CpuBasic2_retn_i0_opsize__ud2_c64, },
4858 { 32, false,760, bs3CpuBasic2_retn_i760__ud2_c64, },
4859 };
4860 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
4861 bool const fFix64OpSize = enmCpuVendor == BS3CPUVENDOR_INTEL; /** @todo what does VIA do? */
4862
4863 /* Prepare a copy of the UD2 instructions in low memory for opsize prefixed
4864 tests, unless we're on Intel where the opsize prefix is ignored. Here we
4865 just fill low memory with int3's so we can detect non-Intel behaviour. */
4866 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_begin_c64);
4867 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_end_c64) - offLow;
4868 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4869 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4870 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4871 Bs3TestFailedF("Opsize overridden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
4872 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4873 if (!fFix64OpSize)
4874 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4875 if (s_aTests[iTest].fOpSizePfx)
4876 {
4877 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4878 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4879 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4880 pbCode16[offUd] = 0xf1; /* replace original ud2 with icebp */
4881 pbCode16[offUd + 1] = 0xf1;
4882 pbLow[offUd] = 0x0f; /* plant ud2 in low memory */
4883 pbLow[offUd + 1] = 0x0b;
4884 }
4885
4886 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4887 {
4888 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4889
4890 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4891 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4892 CtxExpected.cs = Ctx.cs;
4893 if (!s_aTests[iTest].fOpSizePfx || fFix64OpSize)
4894 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 8;
4895 else
4896 {
4897 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 2;
4898 CtxExpected.rip.u &= UINT16_MAX;
4899 }
4900 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4901 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4902 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64\n", Ctx.ss, Ctx.rsp.u);
4903 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx && !fFix64OpSize ? 2 : 8);
4904 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4905 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4906 g_usBs3TestStep++;
4907
4908 /* Again single stepping: */
4909 //Bs3TestPrintf("stepping...\n");
4910 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4911 Ctx.rflags.u16 |= X86_EFL_TF;
4912 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4913 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx && !fFix64OpSize ? 2 : 8);
4914 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4915 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4916 Ctx.rflags.u16 &= ~X86_EFL_TF;
4917 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4918 g_usBs3TestStep++;
4919 }
4920 }
4921 else
4922 Bs3TestFailed("wtf?");
4923
4924 return 0;
4925}
4926
4927
4928/*********************************************************************************************************************************
4929* Far RET *
4930*********************************************************************************************************************************/
4931#define PROTO_ALL(a_Template) \
4932 FNBS3FAR a_Template ## _c16, \
4933 a_Template ## _c32, \
4934 a_Template ## _c64
4935PROTO_ALL(bs3CpuBasic2_retf);
4936PROTO_ALL(bs3CpuBasic2_retf_opsize);
4937FNBS3FAR bs3CpuBasic2_retf_rexw_c64;
4938FNBS3FAR bs3CpuBasic2_retf_rexw_opsize_c64;
4939FNBS3FAR bs3CpuBasic2_retf_opsize_rexw_c64;
4940PROTO_ALL(bs3CpuBasic2_retf_i32);
4941PROTO_ALL(bs3CpuBasic2_retf_i32_opsize);
4942FNBS3FAR bs3CpuBasic2_retf_i24_rexw_c64;
4943FNBS3FAR bs3CpuBasic2_retf_i24_rexw_opsize_c64;
4944FNBS3FAR bs3CpuBasic2_retf_i24_opsize_rexw_c64;
4945PROTO_ALL(bs3CpuBasic2_retf_i888);
4946#undef PROTO_ALL
4947
4948
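/**
 * Seeds the stack for the far-RET tests.
 *
 * Writes the return IP and CS as 2, 4 or 8 byte stack items and, when
 * fWithStack is set, the return SP and SS located cbImm bytes further up;
 * the surrounding area is filled with 0xff bytes first.
 */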
4949static void bs3CpuBasic2_retf_PrepStack(BS3PTRUNION StkPtr, uint8_t cbStkItem, RTSEL uRetCs, uint64_t uRetRip,
4950 bool fWithStack, uint16_t cbImm, RTSEL uRetSs, uint64_t uRetRsp)
4951{
4952 Bs3MemSet(&StkPtr.pu32[-4], 0xff, 96);
4953 if (cbStkItem == 2)
4954 {
4955 StkPtr.pu16[0] = (uint16_t)uRetRip;
4956 StkPtr.pu16[1] = uRetCs;
4957 if (fWithStack)
4958 {
4959 StkPtr.pb += cbImm;
4960 StkPtr.pu16[2] = (uint16_t)uRetRsp;
4961 StkPtr.pu16[3] = uRetSs;
4962 }
4963 }
4964 else if (cbStkItem == 4)
4965 {
4966 StkPtr.pu32[0] = (uint32_t)uRetRip;
4967 StkPtr.pu16[2] = uRetCs;
4968 if (fWithStack)
4969 {
4970 StkPtr.pb += cbImm;
4971 StkPtr.pu32[2] = (uint32_t)uRetRsp;
4972 StkPtr.pu16[6] = uRetSs;
4973 }
4974 }
4975 else
4976 {
4977 StkPtr.pu64[0] = uRetRip;
4978 StkPtr.pu16[4] = uRetCs;
4979 if (fWithStack)
4980 {
4981 StkPtr.pb += cbImm;
4982 StkPtr.pu64[2] = uRetRsp;
4983 StkPtr.pu16[12] = uRetSs;
4984 }
4985 }
4986}
4987
4988
4989/**
4990 * Entrypoint for FAR RET tests.
4991 *
4992 * @returns 0 or BS3TESTDOMODE_SKIPPED.
4993 * @param bMode The CPU mode we're testing.
4994 */
4995BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_far_ret)(uint8_t bMode)
4996{
4997 BS3TRAPFRAME TrapCtx;
4998 BS3REGCTX Ctx;
4999 BS3REGCTX Ctx2;
5000 BS3REGCTX CtxExpected;
5001 unsigned iTest;
5002 unsigned iSubTest;
5003 BS3PTRUNION StkPtr;
5004
5005#define LOW_UD_ADDR 0x0609
5006 uint8_t BS3_FAR * const pbLowUd = BS3_FP_MAKE(BS3_FP_SEG(&StkPtr), LOW_UD_ADDR);
5007#define LOW_SALC_UD_ADDR 0x0611
5008 uint8_t BS3_FAR * const pbLowSalcUd = BS3_FP_MAKE(BS3_FP_SEG(&StkPtr), LOW_SALC_UD_ADDR);
5009#define LOW_SWAPGS_ADDR 0x061d
5010 uint8_t BS3_FAR * const pbLowSwapGs = BS3_FP_MAKE(BS3_FP_SEG(&StkPtr), LOW_SWAPGS_ADDR);
5011#define BS3TEXT16_ADDR_HI (BS3_ADDR_BS3TEXT16 >> 16)
5012
5013 /* make sure they're allocated */
5014 Bs3MemZero(&Ctx, sizeof(Ctx));
5015 Bs3MemZero(&Ctx2, sizeof(Ctx2));
5016 Bs3MemZero(&CtxExpected, sizeof(CtxExpected));
5017 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
5018
5019 bs3CpuBasic2_SetGlobals(bMode);
5020
5021 //if (!BS3_MODE_IS_64BIT_SYS(bMode) && bMode != BS3_MODE_PP32_16) return 0xff;
5022 //if (bMode != BS3_MODE_PE32_16) return 0xff;
5023
5024 /*
5025 * When dealing with retf using a 16-bit effective operand size to 32-bit or 64-bit
5026 * code, we're restricted to a 16-bit address. So, we plant a UD
5027 * instruction below 64KB that we can target with flat 32/64 code segments.
5028 * (Putting it on the stack would be possible too, but we'd have to create
5029 * the sub-test tables dynamically, which isn't necessary.)
5030 */
5031 Bs3MemSet(&pbLowUd[-9], 0xcc, 32);
5032 Bs3MemSet(&pbLowSalcUd[-9], 0xcc, 32);
5033 Bs3MemSet(&pbLowSwapGs[-9], 0xcc, 32);
5034
5035 pbLowUd[0] = 0x0f; /* ud2 */
5036 pbLowUd[1] = 0x0b;
5037
5038 /* A variation to detect whether we're in 64-bit or 16-bit mode when
5039 executing the code. */
5040 pbLowSalcUd[0] = 0xd6; /* salc */
5041 pbLowSalcUd[1] = 0x0f; /* ud2 */
5042 pbLowSalcUd[2] = 0x0b;
5043
5044 /* A variation to check that we're not in 64-bit mode. */
5045 pbLowSwapGs[0] = 0x0f; /* swapgs */
5046 pbLowSwapGs[1] = 0x01;
5047 pbLowSwapGs[2] = 0xf8;
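/* (swapgs is only valid in 64-bit mode; executed from 16-bit or 32-bit code it
   raises #UD.) */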
5048
5049 /*
5050 * Use separate stacks for all relevant CPU exceptions so we can put
5051 * garbage in unused RSP bits w/o needing to care about where a long mode
5052 * handler will end up when accessing the whole RSP. (Not an issue with
5053 * 16-bit and 32-bit protected mode kernels, as here the weird SS based
5054 * stack pointer handling is in effect and the exception handler code
5055 * will just continue using the same SS and same portion of RSP.)
5056 *
5057 * See r154660.
5058 */
5059 if (BS3_MODE_IS_64BIT_SYS(bMode))
5060 Bs3Trap64InitEx(true);
5061
5062 /*
5063 * Create some call gates and whatnot for the UD2 code using the spare selectors.
5064 */
5065 if (BS3_MODE_IS_64BIT_SYS(bMode))
5066 for (iTest = 0; iTest < 16; iTest++)
5067 Bs3SelSetupGate64(&Bs3GdteSpare00 + iTest * 2, iTest /*bType*/, 3 /*bDpl*/,
5068 BS3_SEL_R0_CS64, BS3_FP_OFF(bs3CpuBasic2_ud2) + BS3_ADDR_BS3TEXT16);
5069 else
5070 {
5071 for (iTest = 0; iTest < 16; iTest++)
5072 {
5073 Bs3SelSetupGate(&Bs3GdteSpare00 + iTest, iTest /*bType*/, 3 /*bDpl*/,
5074 BS3_SEL_R0_CS16, BS3_FP_OFF(bs3CpuBasic2_ud2), 0);
5075 Bs3SelSetupGate(&Bs3GdteSpare00 + iTest + 16, iTest /*bType*/, 3 /*bDpl*/,
5076 BS3_SEL_R0_CS32, BS3_FP_OFF(bs3CpuBasic2_ud2) + BS3_ADDR_BS3TEXT16, 0);
5077 }
5078 }
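/* I.e. one spare descriptor of each of the 16 system descriptor types, all
   pointing at the bs3CpuBasic2_ud2 code (with both 16-bit and 32-bit CS targets
   when not on a 64-bit system). */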
5079
5080 /*
5081 * Create a context.
5082 *
5083 * ASSUMES we're on the ring-0 stack in ring-0 and using less than 16KB.
5084 */
5085 Bs3RegCtxSaveEx(&Ctx, bMode, 1728);
5086 Ctx.rsp.u = BS3_ADDR_STACK - _16K;
5087 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
5088
5089 StkPtr.pv = Bs3RegCtxGetRspSsAsCurPtr(&Ctx);
5090 //Bs3TestPrintf("Stack=%p rsp=%RX64\n", StkPtr.pv, Ctx.rsp.u);
5091
5092 /*
5093 * 16-bit tests.
5094 */
5095 if (BS3_MODE_IS_16BIT_CODE(bMode))
5096 {
5097 static struct
5098 {
5099 bool fOpSizePfx;
5100 uint16_t cbImm;
5101 FPFNBS3FAR pfnTest;
5102 } const s_aTests[] =
5103 {
5104 { false, 0, bs3CpuBasic2_retf_c16, },
5105 { true, 0, bs3CpuBasic2_retf_opsize_c16, },
5106 { false, 32, bs3CpuBasic2_retf_i32_c16, },
5107 { true, 32, bs3CpuBasic2_retf_i32_opsize_c16, },
5108 { false,888, bs3CpuBasic2_retf_i888_c16, },
5109 };
5110
5111 static struct
5112 {
5113 bool fRmOrV86;
5114 bool fInterPriv;
5115 int8_t iXcpt;
5116 RTSEL uStartSs;
5117 uint8_t cDstBits;
5118 RTSEL uDstCs;
5119 union /* must use a union here as the compiler won't compile if uint16_t and will mess up fixups for uint32_t. */
5120 {
5121 uint32_t offDst;
5122 struct
5123 {
5124 NPVOID pv;
5125 uint16_t uHigh;
5126 } s;
5127 };
5128 RTSEL uDstSs;
5129 uint16_t uErrCd;
5130 } const s_aSubTests[] =
5131 { /* rm/v86, PriChg, Xcpt, uStartSs, => bits uDstCs offDst/pv uDstSs uErrCd */
5132 { true, false, -1, 0, 16, BS3_SEL_TEXT16, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, 0, 0 },
5133 { false, false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_TEXT16 | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, 0 },
5134 { false, false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, 0 },
5135 { false, false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, 0 },
5136 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, 0 },
5137 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, 0 },
5138 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS32 | 1, 0 },
5139 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS32 | 1, 0 },
5140 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5141 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5142 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS32 | 2, 0 },
5143 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS32 | 2, 0 },
5144 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5145 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5146 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS32 | 3, 0 },
5147 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS32 | 3, 0 },
5148 /* conforming stuff */
5149 { false, false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, 0 },
5150 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, 0 },
5151 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 1, BS3_SEL_R0_SS16 },
5152 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5153 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5154 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R1_CS16_CNF },
5155 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R1_CS16_CNF },
5156 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, 0 },
5157 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5158 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5159 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS16_CNF },
5160 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS16_CNF },
5161 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS16_CNF },
5162 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS16_CNF },
5163 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5164 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5165 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS16_CNF },
5166 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS16_CNF },
5167 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS16_CNF },
5168 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS16_CNF },
5169 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS16_CNF },
5170 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 2, BS3_SEL_R3_CS16_CNF },
5171 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5172 /* returning to 32-bit code: */
5173 { false, false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5174 { false, false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS16 | 0, 0 },
5175 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5176 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
5177 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5178 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
5179 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5180 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
5181 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5182 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
5183 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5184 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
5185 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5186 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
5187 { false, false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5188 { false, false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
5189 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5190 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
5191 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5192 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
5193 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5194 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
5195 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5196 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
5197 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5198 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
5199 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5200 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
5201 /* returning to 32-bit conforming code: */
5202 { false, false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5203 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5204 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 1, BS3_SEL_R0_SS16 },
5205 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R0_SS16 },
5206 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 1, BS3_SEL_R3_SS16 },
5207 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, BS3_SEL_R3_SS16 },
5208 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5209 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5210 { false, false, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R1_CS32_CNF },
5211 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5212 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 1, BS3_SEL_R0_SS16 },
5213 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R0_SS16 },
5214 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 1, BS3_SEL_R3_SS16 },
5215 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, BS3_SEL_R3_SS16 },
5216 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5217 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5218 { false, false, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS32_CNF },
5219 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS32_CNF },
5220 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5221 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5222 { false, false, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS32_CNF },
5223 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS32_CNF },
5224 { false, true, 42, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS32_CNF },
5225 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5226 /* returning to 64-bit code or 16-bit when not in long mode: */
5227 { false, false, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5228 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5229 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5230 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5231 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_DS64 | 1, BS3_SEL_R0_DS64 },
5232 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_DS64 | 1, 0 },
5233 { false, false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5234 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5235 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5236 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5237 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5238 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5239 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5240 { false, true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R2_CS64 },
5241 { false, true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 3, BS3_SEL_R2_CS64 },
5242 { false, true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 3, BS3_SEL_R1_SS32 },
5243 { false, true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 2, BS3_SEL_R3_SS32 },
5244 /* returning to 64-bit code or 16-bit when not in long mode, conforming code variant: */
5245 { false, false, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5246 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5247 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5248 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5249
5250 { false, false, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R1_CS64_CNF },
5251 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5252 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 2, BS3_SEL_R1_SS16 },
5253 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 1, BS3_SEL_R2_SS16 },
5254 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R2_SS16 },
5255 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5256 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5257
5258 { false, false, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS64_CNF },
5259 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS64_CNF },
5260 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5261 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5262
5263 { false, false, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS64_CNF },
5264 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS64_CNF },
5265 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS64_CNF },
5266 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5267
5268 /* some additional #GP variations */ /** @todo test all possible exceptions! */
5269 { false, true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS16 },
5270 { false, true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_TSS32_DF | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_TSS32_DF },
5271 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_00 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_00 },
5272 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_01 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_01 },
5273 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_02 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_02 },
5274 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_03 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_03 },
5275 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_04 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_04 },
5276 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_05 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_05 },
5277 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_06 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_06 },
5278 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_07 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_07 },
5279 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_08 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_08 },
5280 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_09 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_09 },
5281 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0a | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0a },
5282 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0b | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0b },
5283 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0c | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0c },
5284 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0d | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0d },
5285 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0e | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0e },
5286 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0f | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0f },
5287 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_10 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_10 },
5288 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_11 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_11 },
5289 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_12 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_12 },
5290 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_13 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_13 },
5291 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_14 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_14 },
5292 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_15 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_15 },
5293 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_16 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_16 },
5294 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_17 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_17 },
5295 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_18 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_18 },
5296 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_19 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_19 },
5297 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_1a | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1a },
5298 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_1b | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1b },
5299 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_1c | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1c },
5300 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_1d | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1d },
5301 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_1e | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1e },
5302 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_1f | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1f },
5303 };
5304
5305 bool const fRmOrV86 = BS3_MODE_IS_RM_OR_V86(bMode);
5306 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
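    /* Needed below: the DR6 result of the two-enabled-plus-one-matching-but-disabled
       breakpoint test differs between AMD (only B1|B3, observed on a 3990X) and
       Intel (B0|B1|B3). */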
5307
5308 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5309 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
5310 {
5311 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
5312
5313 for (iSubTest = 0; iSubTest < RT_ELEMENTS(s_aSubTests); iSubTest++)
5314 {
5315 g_usBs3TestStep = (iTest << 12) | (iSubTest << 4);
5316 if ( s_aSubTests[iSubTest].fRmOrV86 == fRmOrV86
5317 && (s_aSubTests[iSubTest].offDst <= UINT16_MAX || s_aTests[iTest].fOpSizePfx))
5318 {
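                /* Far return frame layout for the 16-bit test code (see bs3CpuBasic2_retf_PrepStack):
                   2 stack items (IP, CS) for a same-privilege return, 4 items (+ SP, SS) for an
                   inter-privilege one; each item is 4 bytes with the o32 prefix, otherwise 2. */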
5319 uint16_t const cbFrmDisp = s_aSubTests[iSubTest].fInterPriv ? iSubTest % 7 : 0;
5320 uint16_t const cbStkItem = s_aTests[iTest].fOpSizePfx ? 4 : 2;
5321 uint16_t const cbFrame = (s_aSubTests[iSubTest].fInterPriv ? 4 : 2) * cbStkItem;
5322 uint32_t const uFlatDst = Bs3SelFar32ToFlat32(s_aSubTests[iSubTest].offDst, s_aSubTests[iSubTest].uDstCs)
5323 + (s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode));
5324 RTSEL const uDstSs = s_aSubTests[iSubTest].uDstSs;
5325 uint64_t uDstRspExpect, uDstRspPush;
5326 uint16_t cErrors;
5327
5328 Ctx.ss = s_aSubTests[iSubTest].uStartSs;
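                /* Note: the upper half of ESP is presumably salted (0xfffe0000) for the 16-bit
                   starting SS so that bogus ESP usage would show up, and truncated to 16 bits
                   for the 32-bit R0 SS (assumption about intent, not stated in the source). */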
5329 if (Ctx.ss != BS3_SEL_R0_SS32)
5330 Ctx.rsp.u32 |= UINT32_C(0xfffe0000);
5331 else
5332 Ctx.rsp.u32 &= UINT16_MAX;
5333 uDstRspExpect = uDstRspPush = Ctx.rsp.u + s_aTests[iTest].cbImm + cbFrame + cbFrmDisp;
5334 if (s_aSubTests[iSubTest].fInterPriv)
5335 {
5336 if (s_aTests[iTest].fOpSizePfx)
5337 uDstRspPush = (uDstRspPush & UINT16_MAX) | UINT32_C(0xacdc0000);
5338 if ( uDstSs == (BS3_SEL_R1_SS32 | 1)
5339 || uDstSs == (BS3_SEL_R2_SS32 | 2)
5340 || uDstSs == (BS3_SEL_R3_SS32 | 3)
5341 || (s_aSubTests[iSubTest].cDstBits == 64 && BS3_MODE_IS_64BIT_SYS(bMode)))
5342 {
5343 if (s_aTests[iTest].fOpSizePfx)
5344 uDstRspExpect = uDstRspPush;
5345 else
5346 uDstRspExpect &= UINT16_MAX;
5347 }
5348 }
5349
5350 CtxExpected.bCpl = Ctx.bCpl;
5351 CtxExpected.cs = Ctx.cs;
5352 CtxExpected.ss = Ctx.ss;
5353 CtxExpected.ds = Ctx.ds;
5354 CtxExpected.es = Ctx.es;
5355 CtxExpected.fs = Ctx.fs;
5356 CtxExpected.gs = Ctx.gs;
5357 CtxExpected.rip.u = Ctx.rip.u;
5358 CtxExpected.rsp.u = Ctx.rsp.u;
5359 CtxExpected.rax.u = Ctx.rax.u;
5360 if (s_aSubTests[iSubTest].iXcpt < 0)
5361 {
5362 CtxExpected.cs = s_aSubTests[iSubTest].uDstCs;
5363 CtxExpected.rip.u = s_aSubTests[iSubTest].offDst;
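                    /* The 64-bit targets are a SALC+UD2 pair; when the system isn't in long mode
                       the SALC byte executes normally (AL = 0xff if CF is set, else 0), so the
                       #UD is raised one byte further in. */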
5364 if (s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
5365 {
5366 CtxExpected.rip.u += 1;
5367 CtxExpected.rax.au8[0] = CtxExpected.rflags.u16 & X86_EFL_CF ? 0xff : 0;
5368 }
5369 CtxExpected.ss = uDstSs;
5370 CtxExpected.rsp.u = uDstRspExpect;
5371 if (s_aSubTests[iSubTest].fInterPriv)
5372 {
5373 uint16_t BS3_FAR *puSel = &CtxExpected.ds; /* ASSUME member order! */
5374 unsigned cSels = 4;
5375 CtxExpected.bCpl = CtxExpected.ss & X86_SEL_RPL;
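                        /* A return to less privileged code clears DS/ES/FS/GS selectors whose
                           DPL is below the new CPL unless they refer to conforming code
                           segments; mirror that in the expected context. */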
5376 while (cSels-- > 0)
5377 {
5378 uint16_t uSel = *puSel;
5379 if ( (uSel & X86_SEL_MASK_OFF_RPL)
5380 && Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u2Dpl < CtxExpected.bCpl
5381 && (Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u4Type & (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
5382 != (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
5383 *puSel = 0;
5384 puSel++;
5385 }
5386 CtxExpected.rsp.u += s_aTests[iTest].cbImm; /* arguments are dropped from both stacks. */
5387 }
5388 }
5389 g_uBs3TrapEipHint = CtxExpected.rip.u32;
5390 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
5391 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64 -> %04RX16:%04RX64 [pushed %#RX64]\n", Ctx.ss, Ctx.rsp.u, CtxExpected.ss, CtxExpected.rsp.u, uDstRspPush);
5392 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5393 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5394 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5395 //Bs3TestPrintf("%p: %04RX16 %04RX16 %04RX16 %04RX16\n", StkPtr.pu16, StkPtr.pu16[0], StkPtr.pu16[1], StkPtr.pu16[2], StkPtr.pu16[3]);
5396 //Bs3TestPrintf("%.48Rhxd\n", StkPtr.pu16);
5397 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5398 if (s_aSubTests[iSubTest].iXcpt < 0)
5399 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5400 else
5401 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5402 g_usBs3TestStep++; /* 1 */
5403
5404                /* Bad hw bp: Set up DR0-3 but use invalid length encodings (non-byte). */
5405 //Bs3TestPrintf("hw bp: bad len\n");
5406 Bs3RegSetDr0(uFlatDst);
5407 Bs3RegSetDr1(uFlatDst);
5408 Bs3RegSetDr2(uFlatDst);
5409 Bs3RegSetDr3(uFlatDst);
5410 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5411 Bs3RegSetDr7(X86_DR7_INIT_VAL
5412 | X86_DR7_RW(0, X86_DR7_RW_EO) | X86_DR7_LEN(1, X86_DR7_LEN_WORD) | X86_DR7_L_G(1)
5413 | X86_DR7_RW(2, X86_DR7_RW_EO) | X86_DR7_LEN(2, X86_DR7_LEN_DWORD) | X86_DR7_L_G(2)
5414 | ( BS3_MODE_IS_64BIT_SYS(bMode)
5415 ? X86_DR7_RW(3, X86_DR7_RW_EO) | X86_DR7_LEN(3, X86_DR7_LEN_QWORD) | X86_DR7_L_G(3) : 0) );
5416 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5417 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5418 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5419 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5420 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5421 if (s_aSubTests[iSubTest].iXcpt < 0)
5422 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5423 else
5424 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5425 bs3CpuBasic2_CheckDr6InitVal();
5426 g_usBs3TestStep++; /* 2 */
5427
5428                /* Bad hw bp: Set up DR0-3 but don't enable them. */
5429 //Bs3TestPrintf("hw bp: disabled\n");
5430 //Bs3RegSetDr0(uFlatDst);
5431 //Bs3RegSetDr1(uFlatDst);
5432 //Bs3RegSetDr2(uFlatDst);
5433 //Bs3RegSetDr3(uFlatDst);
5434 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5435 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5436 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5437 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5438 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5439 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5440 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5441 if (s_aSubTests[iSubTest].iXcpt < 0)
5442 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5443 else
5444 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5445 bs3CpuBasic2_CheckDr6InitVal();
5446 g_usBs3TestStep++; /* 3 */
5447
5448                /* Bad hw bp: Points at the 2nd byte of the UD2. The docs say it only works when pointing at the first byte. */
5449 //Bs3TestPrintf("hw bp: byte 2\n");
5450 Bs3RegSetDr0(uFlatDst + 1);
5451 Bs3RegSetDr1(uFlatDst + 1);
5452 //Bs3RegSetDr2(uFlatDst);
5453 //Bs3RegSetDr3(uFlatDst);
5454 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5455 Bs3RegSetDr7(X86_DR7_INIT_VAL
5456 | X86_DR7_RW(0, X86_DR7_RW_EO) | X86_DR7_LEN(0, X86_DR7_LEN_BYTE) | X86_DR7_L_G(0)
5457 | X86_DR7_RW(1, X86_DR7_RW_EO) | X86_DR7_LEN(1, X86_DR7_LEN_BYTE) | X86_DR7_L_G(1));
5458 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5459 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5460 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5461 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5462 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5463 if (s_aSubTests[iSubTest].iXcpt < 0)
5464 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5465 else
5466 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5467 bs3CpuBasic2_CheckDr6InitVal();
5468 g_usBs3TestStep++; /* 4 */
5469
5470                /* Again with two correctly configured hardware breakpoints and a disabled one that just matches the address: */
5471 //Bs3TestPrintf("bp 1 + 3...\n");
5472 Bs3RegSetDr0(uFlatDst);
5473 Bs3RegSetDr1(uFlatDst);
5474 Bs3RegSetDr2(0);
5475 Bs3RegSetDr3(uFlatDst);
5476 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5477 Bs3RegSetDr7(X86_DR7_INIT_VAL
5478 | X86_DR7_RW(1, X86_DR7_RW_EO) | X86_DR7_LEN(1, X86_DR7_LEN_BYTE) | X86_DR7_L_G(1)
5479 | X86_DR7_RW(3, X86_DR7_RW_EO) | X86_DR7_LEN(3, X86_DR7_LEN_BYTE) | X86_DR7_L_G(3) );
5480 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5481 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5482 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5483 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5484 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5485 if (s_aSubTests[iSubTest].iXcpt < 0)
5486 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected,
5487 enmCpuVendor == BS3CPUVENDOR_AMD ? X86_DR6_B1 | X86_DR6_B3 /* 3990x */
5488 : X86_DR6_B0 | X86_DR6_B1 | X86_DR6_B3);
5489 else
5490 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5491 g_usBs3TestStep++; /* 5 */
5492
5493 /* Again with a single locally enabled breakpoint. */
5494 //Bs3TestPrintf("bp 0/l...\n");
5495 Bs3RegSetDr0(uFlatDst);
5496 Bs3RegSetDr1(0);
5497 Bs3RegSetDr2(0);
5498 Bs3RegSetDr3(0);
5499 Bs3RegSetDr6(X86_DR6_INIT_VAL | X86_DR6_B1 | X86_DR6_B2 | X86_DR6_B3 | X86_DR6_BS);
5500 Bs3RegSetDr7(X86_DR7_INIT_VAL
5501 | X86_DR7_RW(0, X86_DR7_RW_EO) | X86_DR7_LEN(0, X86_DR7_LEN_BYTE) | X86_DR7_L(0));
5502 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5503 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5504 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5505 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5506 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5507 if (s_aSubTests[iSubTest].iXcpt < 0)
5508                    bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_B0 | X86_DR6_BS); /* stale B1-B3 are cleared, B0 set by the hit, stale BS preserved */
5509 else
5510 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5511 g_usBs3TestStep++; /* 6 */
5512
5513                /* Again with a single globally enabled breakpoint and several other types of breakpoints
5514 configured but not enabled. */
5515 //Bs3TestPrintf("bp 2/g+...\n");
5516 cErrors = Bs3TestSubErrorCount();
5517 Bs3RegSetDr0(uFlatDst);
5518 Bs3RegSetDr1(uFlatDst);
5519 Bs3RegSetDr2(uFlatDst);
5520 Bs3RegSetDr3(uFlatDst);
5521 Bs3RegSetDr6(X86_DR6_INIT_VAL | X86_DR6_BS | X86_DR6_BD | X86_DR6_BT | X86_DR6_B2);
5522 Bs3RegSetDr7(X86_DR7_INIT_VAL
5523 | X86_DR7_RW(0, X86_DR7_RW_RW) | X86_DR7_LEN(0, X86_DR7_LEN_BYTE)
5524 | X86_DR7_RW(1, X86_DR7_RW_RW) | X86_DR7_LEN(1, X86_DR7_LEN_BYTE) | X86_DR7_L_G(1)
5525 | X86_DR7_RW(2, X86_DR7_RW_EO) | X86_DR7_LEN(2, X86_DR7_LEN_BYTE) | X86_DR7_G(2)
5526 | X86_DR7_RW(3, X86_DR7_RW_WO) | X86_DR7_LEN(3, X86_DR7_LEN_BYTE) | X86_DR7_G(3)
5527 );
5528 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5529 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5530 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5531 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5532 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5533 if (s_aSubTests[iSubTest].iXcpt < 0)
5534 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_B2 | X86_DR6_BS | X86_DR6_BD | X86_DR6_BT);
5535 else
5536 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5537 g_usBs3TestStep++; /* 7 */
5538
5539 /* Now resume it with lots of execution breakpoints configured. */
5540 if (s_aSubTests[iSubTest].iXcpt < 0 && Bs3TestSubErrorCount() == cErrors)
5541 {
5542 Bs3MemCpy(&Ctx2, &TrapCtx.Ctx, sizeof(Ctx2));
5543 Ctx2.rflags.u32 |= X86_EFL_RF;
5544 //Bs3TestPrintf("bp 3/g+rf %04RX16:%04RX64 efl=%RX32 ds=%04RX16...\n", Ctx2.cs, Ctx2.rip.u, Ctx2.rflags.u32, Ctx2.ds);
5545 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5546 Bs3RegSetDr7(X86_DR7_INIT_VAL
5547 | X86_DR7_RW(0, X86_DR7_RW_EO) | X86_DR7_LEN(0, X86_DR7_LEN_BYTE)
5548 | X86_DR7_RW(1, X86_DR7_RW_EO) | X86_DR7_LEN(1, X86_DR7_LEN_BYTE) | X86_DR7_L_G(1)
5549 | X86_DR7_RW(2, X86_DR7_RW_EO) | X86_DR7_LEN(2, X86_DR7_LEN_BYTE) | X86_DR7_G(2)
5550 | X86_DR7_RW(3, X86_DR7_RW_EO) | X86_DR7_LEN(3, X86_DR7_LEN_BYTE) | X86_DR7_G(3)
5551 );
5552 Bs3TrapSetJmpAndRestore(&Ctx2, &TrapCtx);
5553 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5554 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5555 bs3CpuBasic2_CheckDr6InitVal();
5556 }
5557 g_usBs3TestStep++; /* 8 */
5558
5559 /* Now do single stepping: */
5560 //Bs3TestPrintf("stepping...\n");
5561 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5562 Ctx.rflags.u16 |= X86_EFL_TF;
5563 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5564 if (s_aSubTests[iSubTest].iXcpt < 0 && s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
5565 {
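                    /* When single stepping, the #DB is taken before the target's SALC has
                       executed, so undo the RIP and AL adjustments made for the plain run. */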
5566 CtxExpected.rip.u -= 1;
5567 CtxExpected.rax.u = Ctx.rax.u;
5568 }
5569 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5570 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5571 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5572 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5573 if (s_aSubTests[iSubTest].iXcpt < 0)
5574 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
5575 else
5576 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5577 Ctx.rflags.u16 &= ~X86_EFL_TF;
5578 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5579 g_usBs3TestStep++; /* 9 */
5580
5581 /* Single step with B0-B3 set to check that they're not preserved
5582 and with BD & BT to check that they are (checked on Intel 6700K): */
5583 //Bs3TestPrintf("stepping b0-b3+bd+bt=1...\n");
5584 Bs3RegSetDr6(X86_DR6_INIT_VAL | X86_DR6_B_MASK | X86_DR6_BD | X86_DR6_BT);
5585 Ctx.rflags.u16 |= X86_EFL_TF;
5586 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5587 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5588 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5589 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5590 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5591 if (s_aSubTests[iSubTest].iXcpt < 0)
5592 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS | X86_DR6_BD | X86_DR6_BT);
5593 else
5594 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5595 Ctx.rflags.u16 &= ~X86_EFL_TF;
5596 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5597 g_usBs3TestStep++; /* 10 */
5598
5599 }
5600 }
5601 }
5602 }
5603 /*
5604 * 32-bit tests.
5605 */
5606 else if (BS3_MODE_IS_32BIT_CODE(bMode))
5607 {
5608 static struct
5609 {
5610 bool fOpSizePfx;
5611 uint16_t cbImm;
5612 FPFNBS3FAR pfnTest;
5613 } const s_aTests[] =
5614 {
5615 { false, 0, bs3CpuBasic2_retf_c32, },
5616 { true, 0, bs3CpuBasic2_retf_opsize_c32, },
5617 { false, 32, bs3CpuBasic2_retf_i32_c32, },
5618 { true, 32, bs3CpuBasic2_retf_i32_opsize_c32, },
5619 { false,888, bs3CpuBasic2_retf_i888_c32, },
5620 };
5621
5622 static struct
5623 {
5624 bool fInterPriv;
5625 int8_t iXcpt;
5626 RTSEL uStartSs;
5627 uint8_t cDstBits;
5628 RTSEL uDstCs;
5629            union /* must use a union here: a plain uint16_t won't compile and a plain uint32_t gets its fixups messed up. */
5630 {
5631 uint32_t offDst;
5632 struct
5633 {
5634 NPVOID pv;
5635 uint16_t uHigh;
5636 } s;
5637 };
5638 RTSEL uDstSs;
5639 uint16_t uErrCd;
5640 } const s_aSubTests[] =
5641 { /* PriChg, Xcpt, uStartSs, => bits uDstCs offDst/pv uDstSs uErrCd */
5642 { false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5643 { false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5644 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5645 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5646 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5647 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5648 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5649 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5650 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5651 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5652 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5653 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5654 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5655 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5656 /* same with 32-bit wide target addresses: */
5657 { false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
5658 { false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
5659 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
5660 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
5661 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
5662 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
5663 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
5664 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
5665 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
5666 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
5667 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
5668 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
5669 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
5670 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
5671 /* conforming stuff */
5672 { false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5673 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5674 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
5675 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5676 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5677 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R1_CS32_CNF },
5678 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R1_CS32_CNF },
5679 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5680 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5681 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5682 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS32_CNF },
5683 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS32_CNF },
5684 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS32_CNF },
5685 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS32_CNF },
5686 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5687 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5688 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS32_CNF },
5689 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS32_CNF },
5690 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS32_CNF },
5691 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS32_CNF },
5692 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, BS3_SEL_R3_CS32_CNF },
5693 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 2, BS3_SEL_R3_CS32_CNF },
5694 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5695 /* returning to 16-bit code: */
5696 { false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, 0 },
5697 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
5698 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
5699 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
5700 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
5701 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5702 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
5703 { false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS16 | 0, 0 },
5704 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
5705 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
5706 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
5707 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
5708 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
5709 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5710 /* returning to 16-bit conforming code: */
5711 { false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, 0 },
5712 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
5713 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
5714 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
5715 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
5716 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
5717 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
5718 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5719 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R1_CS16_CNF },
5720 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
5721 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
5722 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
5723 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
5724 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
5725 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
5726 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5727 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS16_CNF },
5728 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS16_CNF },
5729 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
5730 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
5731 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS16_CNF },
5732 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS16_CNF },
5733 { true, 42, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, BS3_SEL_R3_CS16_CNF },
5734 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5735 /* returning to 64-bit code or 16-bit when not in long mode: */
5736 { false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5737 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5738 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5739 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5740 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_DS64 | 1, BS3_SEL_R0_DS64 },
5741 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_DS64 | 1, 0 },
5742 { false, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5743 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5744 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5745 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5746 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5747 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5748 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5749 { true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R2_CS64 },
5750 { true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 3, BS3_SEL_R2_CS64 },
5751 { true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 3, BS3_SEL_R1_SS32 },
5752 { true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 2, BS3_SEL_R3_SS32 },
5753 /* returning to 64-bit code or 16-bit when not in long mode, conforming code variant: */
5754 { false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5755 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5756 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5757 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5758
5759 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R1_CS64_CNF },
5760 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5761 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 2, BS3_SEL_R1_SS16 },
5762 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 1, BS3_SEL_R2_SS16 },
5763 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R2_SS16 },
5764 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5765 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5766
5767 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS64_CNF },
5768 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS64_CNF },
5769 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5770 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5771
5772 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS64_CNF },
5773 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS64_CNF },
5774 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS64_CNF },
5775 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5776
5777 /* some additional #GP variations */ /** @todo test all possible exceptions! */
5778 { true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS16 },
5779 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_00 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_00 },
5780 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_01 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_01 },
5781 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_02 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_02 },
5782 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_03 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_03 },
5783 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_04 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_04 },
5784 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_05 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_05 },
5785 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_06 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_06 },
5786 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_07 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_07 },
5787 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_08 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_08 },
5788 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_09 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_09 },
5789 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0a | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0a },
5790 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0b | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0b },
5791 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0c | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0c },
5792 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0d | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0d },
5793 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0e | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0e },
5794 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0f | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0f },
5795 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_10 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_10 },
5796 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_11 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_11 },
5797 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_12 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_12 },
5798 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_13 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_13 },
5799 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_14 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_14 },
5800 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_15 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_15 },
5801 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_16 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_16 },
5802 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_17 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_17 },
5803 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_18 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_18 },
5804 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_19 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_19 },
5805 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_1a | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1a },
5806 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_1b | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1b },
5807 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_1c | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1c },
5808 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_1d | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1d },
5809 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_1e | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1e },
5810 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_1f | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1f },
5811 };
5812
5813 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
5814 {
5815 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
5816 //Bs3TestPrintf("-------------- #%u: cs:eip=%04RX16:%08RX64 imm=%u%s\n",
5817 // iTest, Ctx.cs, Ctx.rip.u, s_aTests[iTest].cbImm, s_aTests[iTest].fOpSizePfx ? " o16" : "");
5818
5819 for (iSubTest = 0; iSubTest < RT_ELEMENTS(s_aSubTests); iSubTest++)
5820 {
5821 g_usBs3TestStep = (iTest << 12) | (iSubTest << 1);
5822 if (!s_aTests[iTest].fOpSizePfx || s_aSubTests[iSubTest].offDst <= UINT16_MAX)
5823 {
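                /* Frame layout for the 32-bit test code: here the o16 prefix shrinks the stack
                   items to 2 bytes (default 4); again 2 items for same-privilege returns and
                   4 for inter-privilege ones. */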
5824 uint16_t const cbFrmDisp = s_aSubTests[iSubTest].fInterPriv ? iSubTest % 7 : 0;
5825 uint16_t const cbStkItem = s_aTests[iTest].fOpSizePfx ? 2 : 4;
5826 uint16_t const cbFrame = (s_aSubTests[iSubTest].fInterPriv ? 4 : 2) * cbStkItem;
5827 RTSEL const uDstSs = s_aSubTests[iSubTest].uDstSs;
5828 uint64_t uDstRspExpect, uDstRspPush;
5829 //Bs3TestPrintf(" #%u: %s %d %#04RX16 -> %u %#04RX16:%#04RX32 %#04RX16 %#RX16\n", iSubTest, s_aSubTests[iSubTest].fInterPriv ? "priv" : "same", s_aSubTests[iSubTest].iXcpt, s_aSubTests[iSubTest].uStartSs,
5830 // s_aSubTests[iSubTest].cDstBits, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst, s_aSubTests[iSubTest].uDstSs, s_aSubTests[iSubTest].uErrCd);
5831
5832 Ctx.ss = s_aSubTests[iSubTest].uStartSs;
5833 if (Ctx.ss != BS3_SEL_R0_SS32)
5834 Ctx.rsp.u32 |= UINT32_C(0xfffe0000);
5835 else
5836 Ctx.rsp.u32 &= UINT16_MAX;
5837 uDstRspExpect = uDstRspPush = Ctx.rsp.u + s_aTests[iTest].cbImm + cbFrame + cbFrmDisp;
5838 if (s_aSubTests[iSubTest].fInterPriv)
5839 {
5840 if (!s_aTests[iTest].fOpSizePfx)
5841 uDstRspPush = (uDstRspPush & UINT16_MAX) | UINT32_C(0xacdc0000);
5842 if ( uDstSs == (BS3_SEL_R1_SS32 | 1)
5843 || uDstSs == (BS3_SEL_R2_SS32 | 2)
5844 || uDstSs == (BS3_SEL_R3_SS32 | 3)
5845 || (s_aSubTests[iSubTest].cDstBits == 64 && BS3_MODE_IS_64BIT_SYS(bMode)))
5846 {
5847 if (!s_aTests[iTest].fOpSizePfx)
5848 uDstRspExpect = uDstRspPush;
5849 else
5850 uDstRspExpect &= UINT16_MAX;
5851 }
5852 }
5853
5854 CtxExpected.bCpl = Ctx.bCpl;
5855 CtxExpected.cs = Ctx.cs;
5856 CtxExpected.ss = Ctx.ss;
5857 CtxExpected.ds = Ctx.ds;
5858 CtxExpected.es = Ctx.es;
5859 CtxExpected.fs = Ctx.fs;
5860 CtxExpected.gs = Ctx.gs;
5861 CtxExpected.rip.u = Ctx.rip.u;
5862 CtxExpected.rsp.u = Ctx.rsp.u;
5863 CtxExpected.rax.u = Ctx.rax.u;
5864 if (s_aSubTests[iSubTest].iXcpt < 0)
5865 {
5866 CtxExpected.cs = s_aSubTests[iSubTest].uDstCs;
5867 CtxExpected.rip.u = s_aSubTests[iSubTest].offDst;
5868 if (s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
5869 {
5870 CtxExpected.rip.u += 1;
5871 CtxExpected.rax.au8[0] = CtxExpected.rflags.u16 & X86_EFL_CF ? 0xff : 0;
5872 }
5873 CtxExpected.ss = uDstSs;
5874 CtxExpected.rsp.u = uDstRspExpect;
5875 if (s_aSubTests[iSubTest].fInterPriv)
5876 {
5877 uint16_t BS3_FAR *puSel = &CtxExpected.ds; /* ASSUME member order! */
5878 unsigned cSels = 4;
5879 CtxExpected.bCpl = CtxExpected.ss & X86_SEL_RPL;
5880 while (cSels-- > 0)
5881 {
5882 uint16_t uSel = *puSel;
5883 if ( (uSel & X86_SEL_MASK_OFF_RPL)
5884 && Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u2Dpl < CtxExpected.bCpl
5885 && (Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u4Type & (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
5886 != (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
5887 *puSel = 0;
5888 puSel++;
5889 }
5890 CtxExpected.rsp.u += s_aTests[iTest].cbImm; /* arguments are dropped from both stacks. */
5891 }
5892 }
5893 g_uBs3TrapEipHint = CtxExpected.rip.u32;
5894 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64 -> %04RX16:%04RX64 [pushed %#RX64]; %04RX16:%04RX64\n",Ctx.ss, Ctx.rsp.u,
5895 // CtxExpected.ss, CtxExpected.rsp.u, uDstRspPush, CtxExpected.cs, CtxExpected.rip.u);
5896 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5897 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5898 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5899 //Bs3TestPrintf("%p: %04RX16 %04RX16 %04RX16 %04RX16\n", StkPtr.pu16, StkPtr.pu16[0], StkPtr.pu16[1], StkPtr.pu16[2], StkPtr.pu16[3]);
5900 //Bs3TestPrintf("%.48Rhxd\n", StkPtr.pu16);
5901 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5902 if (s_aSubTests[iSubTest].iXcpt < 0)
5903 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5904 else
5905 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5906 g_usBs3TestStep++;
5907
5908 /* Again single stepping: */
5909 //Bs3TestPrintf("stepping...\n");
5910 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5911 Ctx.rflags.u16 |= X86_EFL_TF;
5912 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5913 if (s_aSubTests[iSubTest].iXcpt < 0 && s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
5914 {
5915 CtxExpected.rip.u -= 1;
5916 CtxExpected.rax.u = Ctx.rax.u;
5917 }
5918 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5919 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5920 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5921 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5922 if (s_aSubTests[iSubTest].iXcpt < 0)
5923 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
5924 else
5925 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5926 Ctx.rflags.u16 &= ~X86_EFL_TF;
5927 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5928 g_usBs3TestStep++;
5929 }
5930 }
5931 }
5932 }
5933 /*
5934 * 64-bit tests.
5935 */
5936 else if (BS3_MODE_IS_64BIT_CODE(bMode))
5937 {
5938 static struct
5939 {
5940 uint8_t fOpSizePfx; /**< 0: none, 1: 066h, 2: REX.W; Effective op size prefix. */
5941 uint16_t cbImm;
5942 FPFNBS3FAR pfnTest;
5943 } const s_aTests[] =
5944 {
5945 { 0, 0, bs3CpuBasic2_retf_c64, },
5946 { 1, 0, bs3CpuBasic2_retf_opsize_c64, },
5947 { 0, 32, bs3CpuBasic2_retf_i32_c64, },
5948 { 1, 32, bs3CpuBasic2_retf_i32_opsize_c64, },
5949 { 2, 0, bs3CpuBasic2_retf_rexw_c64, },
5950 { 2, 0, bs3CpuBasic2_retf_opsize_rexw_c64, },
5951 { 1, 0, bs3CpuBasic2_retf_rexw_opsize_c64, },
5952 { 2, 24, bs3CpuBasic2_retf_i24_rexw_c64, },
5953 { 2, 24, bs3CpuBasic2_retf_i24_opsize_rexw_c64, },
5954 { 1, 24, bs3CpuBasic2_retf_i24_rexw_opsize_c64, },
5955 { 0,888, bs3CpuBasic2_retf_i888_c64, },
5956 };
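        /* Note: the rexw_opsize variants end up with an effective 066h operand size (1) while
           opsize_rexw keeps REX.W (2), presumably because a REX prefix is ignored unless it
           immediately precedes the opcode. */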
5957
5958 static struct
5959 {
5960 bool fInterPriv;
5961 int8_t iXcpt;
5962 RTSEL uStartSs;
5963 uint8_t cDstBits;
5964 RTSEL uDstCs;
5965            union /* must use a union here: a plain uint16_t won't compile and a plain uint32_t gets its fixups messed up. */
5966 {
5967 uint32_t offDst;
5968 struct
5969 {
5970 NPVOID pv;
5971 uint16_t uHigh;
5972 } s;
5973 };
5974 RTSEL uDstSs;
5975 uint16_t uErrCd;
5976 } const s_aSubTests[] =
5977 { /* PriChg, Xcpt, uStartSs, => bits uDstCs offDst/pv uDstSs uErrCd */
5978 { false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5979 { false, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5980 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5981 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5982 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5983 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5984 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5985 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5986 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5987 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5988 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5989 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5990 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5991 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5992 /* same with 32-bit wide target addresses: */
5993 { false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64 | 0, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
5994 { false, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64 | 0, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
5995 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
5996 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
5997 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
5998 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
5999 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
6000 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
6001 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
6002 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
6003 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
6004 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
6005 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
6006 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
6007 /* conforming stuff */
6008 { false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
6009 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
6010 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
6011 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6012 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6013 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R1_CS64_CNF },
6014 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R1_CS64_CNF },
6015 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
6016 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6017 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6018 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS64_CNF },
6019 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS64_CNF },
6020 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS64_CNF },
6021 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS64_CNF },
6022 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6023 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6024 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS64_CNF },
6025 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS64_CNF },
6026 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS64_CNF },
6027 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS64_CNF },
6028 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, BS3_SEL_R3_CS64_CNF },
6029 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 2, BS3_SEL_R3_CS64_CNF },
6030 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6031 /* returning to 16-bit code: */
6032 { false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, 0 },
6033 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
6034 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
6035 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
6036 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
6037 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
6038 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
6039 { false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS16 | 0, 0 },
6040 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
6041 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
6042 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
6043 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
6044 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
6045 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
6046 /* returning to 16-bit conforming code: */
6047 { false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, 0 },
6048 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
6049 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
6050 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
6051 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
6052 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
6053 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
6054 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
6055 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R1_CS16_CNF },
6056 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
6057 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
6058 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
6059 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
6060 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
6061 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
6062 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
6063 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS16_CNF },
6064 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS16_CNF },
6065 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
6066 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
6067 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS16_CNF },
6068 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS16_CNF },
6069 { true, 42, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS32 | 2, BS3_SEL_R3_CS16_CNF },
6070 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
6071 /* returning to 32-bit code - narrow 16-bit target address: */
6072 { false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
6073 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
6074 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
6075 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6076 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
6077 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6078 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
6079 { false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
6080 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
6081 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
6082 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
6083 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6084 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
6085 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6086 /* returning to 32-bit code - wider 32-bit target address: */
6087 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
6088 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
6089 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
6090 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
6091 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
6092 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
6093 { false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS16 | 0, 0 },
6094 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
6095 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
6096 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
6097 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
6098 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
6099 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
6100 /* returning to 32-bit conforming code: */
6101 { false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
6102 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
6103 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
6104 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
6105 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
6106 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
6107 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
6108 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 3, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6109 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R1_CS32_CNF },
6110 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
6111 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
6112 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
6113 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
6114 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
6115 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
6116 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
6117 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS32_CNF },
6118 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS32_CNF },
6119 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 2, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6120 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
6121 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS32_CNF },
6122 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS32_CNF },
6123 { true, 42, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, BS3_SEL_R3_CS32_CNF },
6124 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
6125
6126 /* some additional #GP variations */ /** @todo test all possible exceptions! */
6127 { true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS16 },
6128
6129 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_00 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_00 },
6130 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_02 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_02 },
6131 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_04 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_04 },
6132 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_06 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_06 },
6133 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_08 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_08 },
6134 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_0a | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0a },
6135 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_0c | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0c },
6136 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_0e | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0e },
6137 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_10 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_10 },
6138 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_12 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_12 },
6139 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_14 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_14 },
6140 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_16 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_16 },
6141 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_18 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_18 },
6142 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_1a | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1a },
6143 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_1c | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1c },
6144 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_1e | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1e },
6145 };
6146
6147 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
6148 {
6149 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
6150 //Bs3TestPrintf("-------------- #%u: cs:eip=%04RX16:%08RX64 imm=%u%s\n", iTest, Ctx.cs, Ctx.rip.u, s_aTests[iTest].cbImm,
6151 // s_aTests[iTest].fOpSizePfx == 1 ? " o16" : s_aTests[iTest].fOpSizePfx == 2 ? " o64" : "");
6152
6153 for (iSubTest = 0; iSubTest < RT_ELEMENTS(s_aSubTests); iSubTest++)
6154 {
6155 g_usBs3TestStep = (iTest << 12) | (iSubTest << 1);
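            /* o16 retf only pops 16-bit offsets, so skip sub-tests whose target offset doesn't fit in 16 bits. */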
6156 if (s_aTests[iTest].fOpSizePfx != 1 || s_aSubTests[iSubTest].offDst <= UINT16_MAX)
6157 {
6158 uint16_t const cbFrmDisp = s_aSubTests[iSubTest].fInterPriv ? iSubTest % 7 : 0;
6159 uint16_t const cbStkItem = s_aTests[iTest].fOpSizePfx == 2 ? 8 : s_aTests[iTest].fOpSizePfx == 0 ? 4 : 2;
6160 uint16_t const cbFrame = (s_aSubTests[iSubTest].fInterPriv ? 4 : 2) * cbStkItem;
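                /* The retf frame holds the return IP and CS; a return that changes privilege level also pops SP and SS. */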
6161 RTSEL const uDstSs = s_aSubTests[iSubTest].uDstSs;
6162 uint64_t uDstRspExpect, uDstRspPush;
6163 //Bs3TestPrintf(" #%u: %s %d %#04RX16 -> %u %#04RX16:%#04RX32 %#04RX16 %#RX16\n", iSubTest, s_aSubTests[iSubTest].fInterPriv ? "priv" : "same", s_aSubTests[iSubTest].iXcpt, s_aSubTests[iSubTest].uStartSs,
6164 // s_aSubTests[iSubTest].cDstBits, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst, s_aSubTests[iSubTest].uDstSs, s_aSubTests[iSubTest].uErrCd);
6165
6166 Ctx.ss = s_aSubTests[iSubTest].uStartSs;
6167 uDstRspExpect = uDstRspPush = Ctx.rsp.u + s_aTests[iTest].cbImm + cbFrame + cbFrmDisp;
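                /* For inter-privilege returns with 32/64-bit operand size, seed the pushed RSP value with garbage
                   in the upper bits so we can check how much of it the CPU actually loads for the given target. */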
6168 if (s_aSubTests[iSubTest].fInterPriv)
6169 {
6170 if (s_aTests[iTest].fOpSizePfx != 1)
6171 {
6172 if (s_aTests[iTest].fOpSizePfx == 2)
6173 uDstRspPush |= UINT64_C(0xf00dfaceacdc0000);
6174 else
6175 uDstRspPush |= UINT32_C(0xacdc0000);
6176 if (s_aSubTests[iSubTest].cDstBits == 64)
6177 uDstRspExpect = uDstRspPush;
6178 else if (!BS3_SEL_IS_SS16(uDstSs))
6179 uDstRspExpect = (uint32_t)uDstRspPush;
6180 }
6181 }
6182
6183 CtxExpected.bCpl = Ctx.bCpl;
6184 CtxExpected.cs = Ctx.cs;
6185 CtxExpected.ss = Ctx.ss;
6186 CtxExpected.ds = Ctx.ds;
6187 CtxExpected.es = Ctx.es;
6188 CtxExpected.fs = Ctx.fs;
6189 CtxExpected.gs = Ctx.gs;
6190 CtxExpected.rip.u = Ctx.rip.u;
6191 CtxExpected.rsp.u = Ctx.rsp.u;
6192 CtxExpected.rax.u = Ctx.rax.u;
6193 if (s_aSubTests[iSubTest].iXcpt < 0)
6194 {
6195 CtxExpected.cs = s_aSubTests[iSubTest].uDstCs;
6196 CtxExpected.rip.u = s_aSubTests[iSubTest].offDst;
6197 if (s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
6198 {
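                        /* Outside a long mode system the CS.L bit is ignored, so the leading SALC byte executes
                           (AL = CF ? 0xff : 0) and the #UD is raised by the UD2 one byte further in. */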
6199 CtxExpected.rip.u += 1;
6200 CtxExpected.rax.au8[0] = CtxExpected.rflags.u16 & X86_EFL_CF ? 0xff : 0;
6201 }
6202 CtxExpected.ss = uDstSs;
6203 CtxExpected.rsp.u = uDstRspExpect;
6204 if (s_aSubTests[iSubTest].fInterPriv)
6205 {
6206 uint16_t BS3_FAR *puSel = &CtxExpected.ds; /* ASSUME member order! */
6207 unsigned cSels = 4;
6208 CtxExpected.bCpl = CtxExpected.ss & X86_SEL_RPL;
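                        /* Returning to lesser privilege clears data segment registers whose selector DPL is below
                           the new CPL, unless they refer to conforming code segments. */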
6209 while (cSels-- > 0)
6210 {
6211 uint16_t uSel = *puSel;
6212 if ( (uSel & X86_SEL_MASK_OFF_RPL)
6213 && Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u2Dpl < CtxExpected.bCpl
6214 && (Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u4Type & (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
6215 != (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
6216 *puSel = 0;
6217 puSel++;
6218 }
6219 CtxExpected.rsp.u += s_aTests[iTest].cbImm; /* arguments are dropped from both stacks. */
6220 }
6221 }
6222 g_uBs3TrapEipHint = CtxExpected.rip.u32;
6223 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64 -> %04RX16:%04RX64 [pushed %#RX64]; %04RX16:%04RX64\n",Ctx.ss, Ctx.rsp.u,
6224 // CtxExpected.ss, CtxExpected.rsp.u, uDstRspPush, CtxExpected.cs, CtxExpected.rip.u);
6225 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
6226 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
6227 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
6228 //Bs3TestPrintf("%p: %04RX16 %04RX16 %04RX16 %04RX16\n", StkPtr.pu16, StkPtr.pu16[0], StkPtr.pu16[1], StkPtr.pu16[2], StkPtr.pu16[3]);
6229 //Bs3TestPrintf("%.48Rhxd\n", StkPtr.pu16);
6230 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
6231 if (s_aSubTests[iSubTest].iXcpt < 0)
6232 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
6233 else
6234 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
6235 g_usBs3TestStep++;
6236
6237 /* Again single stepping: */
6238 //Bs3TestPrintf("stepping...\n");
6239 Bs3RegSetDr6(X86_DR6_INIT_VAL);
6240 Ctx.rflags.u16 |= X86_EFL_TF;
6241 CtxExpected.rflags.u16 = Ctx.rflags.u16;
6242 if (s_aSubTests[iSubTest].iXcpt < 0 && s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
6243 {
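                    /* The single-step #DB is taken before the first target instruction executes, so undo the
                       SALC adjustments made for the non-stepped run above. */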
6244 CtxExpected.rip.u -= 1;
6245 CtxExpected.rax.u = Ctx.rax.u;
6246 }
6247 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
6248 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
6249 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
6250 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
6251 if (s_aSubTests[iSubTest].iXcpt < 0)
6252 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
6253 else
6254 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
6255 Ctx.rflags.u16 &= ~X86_EFL_TF;
6256 CtxExpected.rflags.u16 = Ctx.rflags.u16;
6257 g_usBs3TestStep++;
6258 }
6259 }
6260 }
6261 }
6262 else
6263 Bs3TestFailed("wtf?");
6264
6265 if (BS3_MODE_IS_64BIT_SYS(bMode))
6266 Bs3TrapReInit();
6267 return 0;
6268}
6269
6270
6271
6272/*********************************************************************************************************************************
6273* Instruction Length *
6274*********************************************************************************************************************************/
6275
6276
6277static uint8_t bs3CpuBasic2_instr_len_Worker(uint8_t bMode, uint8_t BS3_FAR *pbCodeBuf)
6278{
6279 BS3TRAPFRAME TrapCtx;
6280 BS3REGCTX Ctx;
6281 BS3REGCTX CtxExpected;
6282 uint32_t uEipBase;
6283 unsigned cbInstr;
6284 unsigned off;
6285
6286 /* Make sure they're allocated and all zeroed. */
6287 Bs3MemZero(&Ctx, sizeof(Ctx));
6288 Bs3MemZero(&CtxExpected, sizeof(Ctx));
6289 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
6290
6291 /*
6292 * Create a context.
6293 *
6294 * ASSUMES we're on the ring-0 stack in ring-0 and using less than 16KB.
6295 */
6296 Bs3RegCtxSaveEx(&Ctx, bMode, 768);
6297 Bs3RegCtxSetRipCsFromCurPtr(&Ctx, (FPFNBS3FAR)pbCodeBuf);
6298 uEipBase = Ctx.rip.u32;
6299
6300 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
6301
6302 /*
6303 * Simple stuff crossing the page.
6304 */
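    /* The start offset sweeps from 32 bytes before to 16 bytes after the first page boundary, so the generated sequences straddle it. */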
6305 for (off = X86_PAGE_SIZE - 32; off <= X86_PAGE_SIZE + 16; off++)
6306 {
6307 Ctx.rip.u32 = uEipBase + off;
6308 for (cbInstr = 0; cbInstr < 24; cbInstr++)
6309 {
6310 /*
6311 * Generate the instructions:
6312 * [es] nop
6313 * ud2
6314 */
6315 if (cbInstr > 0)
6316 {
6317 Bs3MemSet(&pbCodeBuf[off], 0x26 /* es */, cbInstr);
6318 pbCodeBuf[off + cbInstr - 1] = 0x90; /* nop */
6319 }
6320 pbCodeBuf[off + cbInstr + 0] = 0x0f; /* ud2 */
6321 pbCodeBuf[off + cbInstr + 1] = 0x0b;
6322
6323 /*
6324 * Test it.
6325 */
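            /* An instruction longer than 15 bytes must raise #GP(0); shorter ones execute the prefixed NOP and we trap on the UD2 that follows. */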
6326 if (cbInstr < 16)
6327 CtxExpected.rip.u32 = Ctx.rip.u32 + cbInstr;
6328 else
6329 CtxExpected.rip.u32 = Ctx.rip.u32;
6330 g_uBs3TrapEipHint = CtxExpected.rip.u32;
6331 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
6332 if (cbInstr < 16)
6333 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
6334 else
6335 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
6336 }
6337 pbCodeBuf[off] = 0xf1; /* icebp */
6338 }
6339
6340 /*
6341 * Pit instruction length violations against the segment limit (#GP).
6342 */
6343 if (!BS3_MODE_IS_RM_OR_V86(bMode) && bMode != BS3_MODE_LM64)
6344 {
6345 /** @todo */
6346 }
6347
6348 /*
6349 * Pit instruction length violations against an invalid page (#PF).
6350 */
6351 if (BS3_MODE_IS_PAGED(bMode))
6352 {
6353 /** @todo */
6354 }
6355
6356 return 0;
6357}
6358
6359
6360/**
6361 * Entrypoint for instruction length tests.
6362 *
6363 * @returns 0 or BS3TESTDOMODE_SKIPPED.
6364 * @param bMode The CPU mode we're testing.
6365 */
6366BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_instr_len)(uint8_t bMode)
6367{
6368 /*
6369 * Allocate three pages so we can straddle an instruction across the
6370 * boundary for testing special IEM cases, with the last page being
6371 * made inaccessible and useful for pitting #PF against #GP.
6372 */
6373 uint8_t BS3_FAR * const pbCodeBuf = (uint8_t BS3_FAR *)Bs3MemAlloc(BS3MEMKIND_REAL, X86_PAGE_SIZE * 3);
6374 //Bs3TestPrintf("pbCodeBuf=%p\n", pbCodeBuf);
6375 if (pbCodeBuf)
6376 {
6377 Bs3MemSet(pbCodeBuf, 0xf1, X86_PAGE_SIZE * 3);
6378 bs3CpuBasic2_SetGlobals(bMode);
6379
6380 if (!BS3_MODE_IS_PAGED(bMode))
6381 bs3CpuBasic2_instr_len_Worker(bMode, pbCodeBuf);
6382 else
6383 {
6384 uint32_t const uFlatLastPg = Bs3SelPtrToFlat(pbCodeBuf) + X86_PAGE_SIZE * 2;
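            /* Make the last page not-present so that code running into it raises #PF (for pitting #PF against #GP). */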
6385 int rc = Bs3PagingProtect(uFlatLastPg, X86_PAGE_SIZE, 0, X86_PTE_P);
6386 if (RT_SUCCESS(rc))
6387 {
6388 bs3CpuBasic2_instr_len_Worker(bMode, pbCodeBuf);
6389 Bs3PagingProtect(uFlatLastPg, X86_PAGE_SIZE, X86_PTE_P, 0);
6390 }
6391 else
6392 Bs3TestFailed("Failed to make the last code page not-present");
6393 }
6394
6395 Bs3MemFree(pbCodeBuf, X86_PAGE_SIZE * 3);
6396 }
6397 else
6398 Bs3TestFailed("Failed to allocate 3 code pages");
6399 return 0;
6400}
6401