
source: vbox/trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-basic-2-x0.c@ 104401

Last change on this file: r104401, checked in by vboxsync, 7 months ago

ValidationKit/bs3-cpu-basic-2: Add a test value > 4096 for the retn Iw testcase in order to test the arm64 path in the recompiler properly, bugref:10371

1/* $Id: bs3-cpu-basic-2-x0.c 104401 2024-04-23 09:28:09Z vboxsync $ */
2/** @file
3 * BS3Kit - bs3-cpu-basic-2, C test driver code (16-bit).
4 */
5
6/*
7 * Copyright (C) 2007-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * The contents of this file may alternatively be used under the terms
26 * of the Common Development and Distribution License Version 1.0
27 * (CDDL), a copy of it is provided in the "COPYING.CDDL" file included
28 * in the VirtualBox distribution, in which case the provisions of the
29 * CDDL are applicable instead of those of the GPL.
30 *
31 * You may elect to license modified versions of this file under the
32 * terms and conditions of either the GPL or the CDDL or both.
33 *
34 * SPDX-License-Identifier: GPL-3.0-only OR CDDL-1.0
35 */
36
37
38/*********************************************************************************************************************************
39* Header Files *
40*********************************************************************************************************************************/
41#define BS3_USE_X0_TEXT_SEG
42#include <bs3kit.h>
43#include <iprt/asm.h>
44#include <iprt/asm-amd64-x86.h>
45#include <iprt/asm-mem.h>
46
47
48/*********************************************************************************************************************************
49* Defined Constants And Macros *
50*********************************************************************************************************************************/
51#undef CHECK_MEMBER
52#define CHECK_MEMBER(a_szName, a_szFmt, a_Actual, a_Expected) \
53 do \
54 { \
55 if ((a_Actual) == (a_Expected)) { /* likely */ } \
56 else bs3CpuBasic2_FailedF(a_szName "=" a_szFmt " expected " a_szFmt, (a_Actual), (a_Expected)); \
57 } while (0)
58
59
60/** Indicates that we've got an operand size prefix and that it matters. */
61#define BS3CB2SIDTSGDT_F_OPSIZE UINT8_C(0x01)
62/** Worker requires 386 or later. */
63#define BS3CB2SIDTSGDT_F_386PLUS UINT8_C(0x02)
64
65
66/** @name MYOP_XXX - Values for FNBS3CPUBASIC2ACTSTCODE::fOp.
67 *
68 * These are flags, though we've pre-combined a few to shorten things.
69 *
70 * @{ */
71#define MYOP_LD 0x1 /**< The instruction loads. */
72#define MYOP_ST 0x2 /**< The instruction stores. */
73#define MYOP_EFL 0x4 /**< The instruction modifies EFLAGS. */
74#define MYOP_AC_GP 0x8 /**< The instruction may cause either \#AC or \#GP (FXSAVE). */
75
76#define MYOP_LD_ST 0x3 /**< Convenience: The instruction both loads and stores. */
77#define MYOP_LD_DIV 0x5 /**< Convenience: DIV instruction - loading and modifying flags. */
78/** @} */
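/* Note: MYOP_LD_ST is simply (MYOP_LD | MYOP_ST) and MYOP_LD_DIV is (MYOP_LD | MYOP_EFL). */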
79
80
81/*********************************************************************************************************************************
82* Structures and Typedefs *
83*********************************************************************************************************************************/
84/** Near void pointer. */
85typedef void BS3_NEAR *NPVOID;
86
87typedef struct BS3CB2INVLDESCTYPE
88{
89 uint8_t u4Type;
90 uint8_t u1DescType;
91} BS3CB2INVLDESCTYPE;
92
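/** Describes one SIDT/SGDT/LIDT/LGDT test worker (see g_aSidtWorkers & friends):
 * a human readable description, the far worker code snippet, the worker's code
 * size in bytes (apparently everything up to the trailing ud2), whether an SS
 * segment override is used, the BS3_MODE_CODE_XXX modes it applies to, and
 * BS3CB2SIDTSGDT_F_XXX flags. */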
93typedef struct BS3CB2SIDTSGDT
94{
95 const char *pszDesc;
96 FPFNBS3FAR fpfnWorker;
97 uint8_t cbInstr;
98 bool fSs;
99 uint8_t bMode;
100 uint8_t fFlags;
101} BS3CB2SIDTSGDT;
102
103
104typedef void BS3_CALL FNBS3CPUBASIC2ACSNIPPET(void);
105
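/** Describes one common-mode test snippet: the code to run, MYOP_XXX flags for
 * what it does, the size of the memory access, the (assumed) alignment needed
 * to avoid faulting, and an optional instruction offset for skipping a leading
 * fninit/fldz. */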
106typedef struct FNBS3CPUBASIC2ACTSTCODE
107{
108 FNBS3CPUBASIC2ACSNIPPET BS3_FAR *pfn;
109 uint8_t fOp;
110 uint16_t cbMem;
111 uint8_t cbAlign;
112 uint8_t offFaultInstr; /**< For skipping fninit with the fld test. */
113} FNBS3CPUBASIC2ACTSTCODE;
114typedef FNBS3CPUBASIC2ACTSTCODE const *PCFNBS3CPUBASIC2ACTSTCODE;
115
116typedef struct BS3CPUBASIC2ACTTSTCMNMODE
117{
118 uint8_t bMode;
119 uint16_t cEntries;
120 PCFNBS3CPUBASIC2ACTSTCODE paEntries;
121} BS3CPUBASIC2PFTTSTCMNMODE;
122typedef BS3CPUBASIC2PFTTSTCMNMODE const *PCBS3CPUBASIC2PFTTSTCMNMODE;
123
124
125/*********************************************************************************************************************************
126* External Symbols *
127*********************************************************************************************************************************/
128extern FNBS3FAR bs3CpuBasic2_Int80;
129extern FNBS3FAR bs3CpuBasic2_Int81;
130extern FNBS3FAR bs3CpuBasic2_Int82;
131extern FNBS3FAR bs3CpuBasic2_Int83;
132
133extern FNBS3FAR bs3CpuBasic2_ud2;
134#define g_bs3CpuBasic2_ud2_FlatAddr BS3_DATA_NM(g_bs3CpuBasic2_ud2_FlatAddr)
135extern uint32_t g_bs3CpuBasic2_ud2_FlatAddr;
136
137extern FNBS3FAR bs3CpuBasic2_salc_ud2;
138extern FNBS3FAR bs3CpuBasic2_swapgs;
139
140extern FNBS3FAR bs3CpuBasic2_iret;
141extern FNBS3FAR bs3CpuBasic2_iret_opsize;
142extern FNBS3FAR bs3CpuBasic2_iret_rexw;
143
144extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c16;
145extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c32;
146extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c64;
147extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c16;
148extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c32;
149extern FNBS3FAR bs3CpuBasic2_sidt_rexw_bx_ud2_c64;
150extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c16;
151extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c32;
152extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c64;
153extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16;
154extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32;
155extern FNBS3FAR bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64;
156
157extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c16;
158extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c32;
159extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c64;
160extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c16;
161extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c32;
162extern FNBS3FAR bs3CpuBasic2_sgdt_rexw_bx_ud2_c64;
163extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c16;
164extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c32;
165extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c64;
166extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16;
167extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32;
168extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64;
169
170extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16;
171extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32;
172extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64;
173extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
174extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
175extern FNBS3FAR bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
176extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16;
177extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt32_es_di__lidt_es_si__ud2_c16;
178extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32;
179extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64;
180extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
181extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
182extern FNBS3FAR bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
183
184extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
185extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
186extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
187extern FNBS3FAR bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
188extern FNBS3FAR bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
189extern FNBS3FAR bs3CpuBasic2_lgdt_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
190extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
191extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
192extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
193extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
194extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
195extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
196
197
198/* bs3-cpu-basic-2-template.mac: */
199FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c16;
200FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c16;
201FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16;
202FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16;
203FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c16;
204FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c16;
205FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c16;
206FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c16;
207FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c16;
208
209FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c32;
210FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c32;
211FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32;
212FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32;
213FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c32;
214FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c32;
215FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c32;
216FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c32;
217FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c32;
218
219FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c64;
220FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c64;
221FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64;
222FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64;
223FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c64;
224FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c64;
225FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c64;
226FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c64;
227FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c64;
228
229
230/*********************************************************************************************************************************
231* Global Variables *
232*********************************************************************************************************************************/
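/* Note: These get dummy non-zero initializers here; the real values are set by
   bs3CpuBasic2_SetGlobals() below. */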
233static const char BS3_FAR *g_pszTestMode = (const char *)1;
234static uint8_t g_bTestMode = 1;
235static bool g_f16BitSys = 1;
236
237
238/** SIDT test workers. */
239static BS3CB2SIDTSGDT const g_aSidtWorkers[] =
240{
241 { "sidt [bx]", bs3CpuBasic2_sidt_bx_ud2_c16, 3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
242 { "sidt [ss:bx]", bs3CpuBasic2_sidt_ss_bx_ud2_c16, 4, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
243 { "o32 sidt [bx]", bs3CpuBasic2_sidt_opsize_bx_ud2_c16, 4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
244 { "o32 sidt [ss:bx]", bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16, 5, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
245 { "sidt [ebx]", bs3CpuBasic2_sidt_bx_ud2_c32, 3, false, BS3_MODE_CODE_32, 0 },
246 { "sidt [ss:ebx]", bs3CpuBasic2_sidt_ss_bx_ud2_c32, 4, true, BS3_MODE_CODE_32, 0 },
247 { "o16 sidt [ebx]", bs3CpuBasic2_sidt_opsize_bx_ud2_c32, 4, false, BS3_MODE_CODE_32, 0 },
248 { "o16 sidt [ss:ebx]", bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32, 5, true, BS3_MODE_CODE_32, 0 },
249 { "sidt [rbx]", bs3CpuBasic2_sidt_bx_ud2_c64, 3, false, BS3_MODE_CODE_64, 0 },
250 { "o64 sidt [rbx]", bs3CpuBasic2_sidt_rexw_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
251 { "o32 sidt [rbx]", bs3CpuBasic2_sidt_opsize_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
252 { "o32 o64 sidt [rbx]", bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64, 0 },
253};
254
255/** SGDT test workers. */
256static BS3CB2SIDTSGDT const g_aSgdtWorkers[] =
257{
258 { "sgdt [bx]", bs3CpuBasic2_sgdt_bx_ud2_c16, 3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
259 { "sgdt [ss:bx]", bs3CpuBasic2_sgdt_ss_bx_ud2_c16, 4, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
260 { "o32 sgdt [bx]", bs3CpuBasic2_sgdt_opsize_bx_ud2_c16, 4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
261 { "o32 sgdt [ss:bx]", bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16, 5, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
262 { "sgdt [ebx]", bs3CpuBasic2_sgdt_bx_ud2_c32, 3, false, BS3_MODE_CODE_32, 0 },
263 { "sgdt [ss:ebx]", bs3CpuBasic2_sgdt_ss_bx_ud2_c32, 4, true, BS3_MODE_CODE_32, 0 },
264 { "o16 sgdt [ebx]", bs3CpuBasic2_sgdt_opsize_bx_ud2_c32, 4, false, BS3_MODE_CODE_32, 0 },
265 { "o16 sgdt [ss:ebx]", bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32, 5, true, BS3_MODE_CODE_32, 0 },
266 { "sgdt [rbx]", bs3CpuBasic2_sgdt_bx_ud2_c64, 3, false, BS3_MODE_CODE_64, 0 },
267 { "o64 sgdt [rbx]", bs3CpuBasic2_sgdt_rexw_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
268 { "o32 sgdt [rbx]", bs3CpuBasic2_sgdt_opsize_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
269 { "o32 o64 sgdt [rbx]", bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64, 0 },
270};
271
272/** LIDT test workers. */
273static BS3CB2SIDTSGDT const g_aLidtWorkers[] =
274{
275 { "lidt [bx]", bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16, 11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
276 { "lidt [ss:bx]", bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16, 12, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
277 { "o32 lidt [bx]", bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16, 12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
278 { "o32 lidt [bx]; sidt32", bs3CpuBasic2_lidt_opsize_bx__sidt32_es_di__lidt_es_si__ud2_c16, 27, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
279 { "o32 lidt [ss:bx]", bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16, 13, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
280 { "lidt [ebx]", bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32, 11, false, BS3_MODE_CODE_32, 0 },
281 { "lidt [ss:ebx]", bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32, 12, true, BS3_MODE_CODE_32, 0 },
282 { "o16 lidt [ebx]", bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32, 12, false, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
283 { "o16 lidt [ss:ebx]", bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32, 13, true, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
284 { "lidt [rbx]", bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64, 9, false, BS3_MODE_CODE_64, 0 },
285 { "o64 lidt [rbx]", bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
286 { "o32 lidt [rbx]", bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
287 { "o32 o64 lidt [rbx]", bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64, 11, false, BS3_MODE_CODE_64, 0 },
288};
289
290/** LGDT test workers. */
291static BS3CB2SIDTSGDT const g_aLgdtWorkers[] =
292{
293 { "lgdt [bx]", bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
294 { "lgdt [ss:bx]", bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 12, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
295 { "o32 lgdt [bx]", bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
296 { "o32 lgdt [ss:bx]", bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 13, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
297 { "lgdt [ebx]", bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 11, false, BS3_MODE_CODE_32, 0 },
298 { "lgdt [ss:ebx]", bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 12, true, BS3_MODE_CODE_32, 0 },
299 { "o16 lgdt [ebx]", bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 12, false, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
300 { "o16 lgdt [ss:ebx]", bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 13, true, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
301 { "lgdt [rbx]", bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 9, false, BS3_MODE_CODE_64, 0 },
302 { "o64 lgdt [rbx]", bs3CpuBasic2_lgdt_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
303 { "o32 lgdt [rbx]", bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
304 { "o32 o64 lgdt [rbx]", bs3CpuBasic2_lgdt_opsize_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 11, false, BS3_MODE_CODE_64, 0 },
305};
306
307
308
309#if 0
310/** Table containing invalid CS selector types. */
311static const BS3CB2INVLDESCTYPE g_aInvalidCsTypes[] =
312{
313 { X86_SEL_TYPE_RO, 1 },
314 { X86_SEL_TYPE_RO_ACC, 1 },
315 { X86_SEL_TYPE_RW, 1 },
316 { X86_SEL_TYPE_RW_ACC, 1 },
317 { X86_SEL_TYPE_RO_DOWN, 1 },
318 { X86_SEL_TYPE_RO_DOWN_ACC, 1 },
319 { X86_SEL_TYPE_RW_DOWN, 1 },
320 { X86_SEL_TYPE_RW_DOWN_ACC, 1 },
321 { 0, 0 },
322 { 1, 0 },
323 { 2, 0 },
324 { 3, 0 },
325 { 4, 0 },
326 { 5, 0 },
327 { 6, 0 },
328 { 7, 0 },
329 { 8, 0 },
330 { 9, 0 },
331 { 10, 0 },
332 { 11, 0 },
333 { 12, 0 },
334 { 13, 0 },
335 { 14, 0 },
336 { 15, 0 },
337};
338
339/** Table containing invalid SS selector types. */
340static const BS3CB2INVLDESCTYPE g_aInvalidSsTypes[] =
341{
342 { X86_SEL_TYPE_EO, 1 },
343 { X86_SEL_TYPE_EO_ACC, 1 },
344 { X86_SEL_TYPE_ER, 1 },
345 { X86_SEL_TYPE_ER_ACC, 1 },
346 { X86_SEL_TYPE_EO_CONF, 1 },
347 { X86_SEL_TYPE_EO_CONF_ACC, 1 },
348 { X86_SEL_TYPE_ER_CONF, 1 },
349 { X86_SEL_TYPE_ER_CONF_ACC, 1 },
350 { 0, 0 },
351 { 1, 0 },
352 { 2, 0 },
353 { 3, 0 },
354 { 4, 0 },
355 { 5, 0 },
356 { 6, 0 },
357 { 7, 0 },
358 { 8, 0 },
359 { 9, 0 },
360 { 10, 0 },
361 { 11, 0 },
362 { 12, 0 },
363 { 13, 0 },
364 { 14, 0 },
365 { 15, 0 },
366};
367#endif
368
369
370static const FNBS3CPUBASIC2ACTSTCODE g_aCmn16[] =
371{
372 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c16, MYOP_LD, 2, 2 },
373 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c16, MYOP_ST, 2, 2 },
374 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16, MYOP_LD_ST, 2, 2 },
375 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16, MYOP_LD_ST | MYOP_EFL, 2, 2 },
376 { bs3CpuBasic2_div_ds_bx__ud2_c16, MYOP_LD_DIV, 2, 2 },
377 { bs3CpuBasic2_fninit_fld_ds_bx__ud2_c16, MYOP_LD, 10, 8, 2 /*fninit*/ },
378 { bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c16, MYOP_LD, 10, 8, 2 /*fninit*/ },
379 { bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c16, MYOP_ST, 10, 8, 4 /*fninit+fldz*/ },
380 { bs3CpuBasic2_fxsave_ds_bx__ud2_c16, MYOP_ST | MYOP_AC_GP, 512, 16 },
381};
382
383static const FNBS3CPUBASIC2ACTSTCODE g_aCmn32[] =
384{
385 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c32, MYOP_LD, 4, 4 },
386 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c32, MYOP_ST, 4, 4 },
387 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32, MYOP_LD_ST, 4, 4 },
388 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32, MYOP_LD_ST | MYOP_EFL, 4, 4 },
389 { bs3CpuBasic2_div_ds_bx__ud2_c32, MYOP_LD_DIV, 4, 4 },
390 { bs3CpuBasic2_fninit_fld_ds_bx__ud2_c32, MYOP_LD, 10, 8, 2 /*fninit*/ },
391 { bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c32, MYOP_LD, 10, 8, 2 /*fninit*/ },
392 { bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c32, MYOP_ST, 10, 8, 4 /*fninit+fldz*/ },
393 { bs3CpuBasic2_fxsave_ds_bx__ud2_c32, MYOP_ST | MYOP_AC_GP, 512, 16 },
394};
395
396static const FNBS3CPUBASIC2ACTSTCODE g_aCmn64[] =
397{
398 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c64, MYOP_LD, 8, 8 },
399 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c64, MYOP_ST, 8, 8 },
400 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64, MYOP_LD_ST, 8, 8 },
401 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64, MYOP_LD_ST | MYOP_EFL, 8, 8 },
402 { bs3CpuBasic2_div_ds_bx__ud2_c64, MYOP_LD_DIV, 8, 8 },
403 { bs3CpuBasic2_fninit_fld_ds_bx__ud2_c64, MYOP_LD, 10, 8, 2 /*fninit*/ },
404 { bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c64, MYOP_LD, 10, 8, 2 /*fninit*/ },
405 { bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c64, MYOP_ST, 10, 8, 4 /*fninit+fldz*/ },
406 { bs3CpuBasic2_fxsave_ds_bx__ud2_c64, MYOP_ST | MYOP_AC_GP, 512, 16 },
407};
408
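/** Maps each code mode to the common test snippet table of matching bitness
 * (the 16-bit table doubles for v8086 mode). */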
409static const BS3CPUBASIC2PFTTSTCMNMODE g_aCmnModes[] =
410{
411 { BS3_MODE_CODE_16, RT_ELEMENTS(g_aCmn16), g_aCmn16 },
412 { BS3_MODE_CODE_V86, RT_ELEMENTS(g_aCmn16), g_aCmn16 },
413 { BS3_MODE_CODE_32, RT_ELEMENTS(g_aCmn32), g_aCmn32 },
414 { BS3_MODE_CODE_64, RT_ELEMENTS(g_aCmn64), g_aCmn64 },
415};
416
417
418/**
419 * Sets globals according to the mode.
420 *
421 * @param bTestMode The test mode.
422 */
423static void bs3CpuBasic2_SetGlobals(uint8_t bTestMode)
424{
425 g_bTestMode = bTestMode;
426 g_pszTestMode = Bs3GetModeName(bTestMode);
427 g_f16BitSys = BS3_MODE_IS_16BIT_SYS(bTestMode);
428 g_usBs3TestStep = 0;
429}
430
431
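/* Watcom inline assembly helper: returns the full 32-bit ESP even from 16-bit
   code; per the value [ax dx] constraint the low word comes back in AX and the
   high word in DX. */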
432uint32_t ASMGetESP(void);
433#pragma aux ASMGetESP = \
434 ".386" \
435 "mov ax, sp" \
436 "mov edx, esp" \
437 "shr edx, 16" \
438 value [ax dx] \
439 modify exact [ax dx];
440
441
442/**
443 * Wrapper around Bs3TestFailedF that prefixes the error with g_usBs3TestStep
444 * and g_pszTestMode.
445 */
446static void bs3CpuBasic2_FailedF(const char *pszFormat, ...)
447{
448 va_list va;
449
450 char szTmp[168];
451 va_start(va, pszFormat);
452 Bs3StrPrintfV(szTmp, sizeof(szTmp), pszFormat, va);
453 va_end(va);
454
455 Bs3TestFailedF("%u - %s: %s", g_usBs3TestStep, g_pszTestMode, szTmp);
456}
457
458
459#if 0
460/**
461 * Compares trap stuff.
462 */
463static void bs3CpuBasic2_CompareIntCtx1(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t bXcpt)
464{
465 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
466 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
467 CHECK_MEMBER("bErrCd", "%#06RX64", pTrapCtx->uErrCd, 0);
468 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, 2 /*int xx*/, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
469 if (Bs3TestSubErrorCount() != cErrorsBefore)
470 {
471 Bs3TrapPrintFrame(pTrapCtx);
472#if 1
473 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
474 Bs3TestPrintf("Halting in CompareTrapCtx1: bXcpt=%#x\n", bXcpt);
475 ASMHalt();
476#endif
477 }
478}
479#endif
480
481
482#if 0
483/**
484 * Compares trap stuff.
485 */
486static void bs3CpuBasic2_CompareTrapCtx2(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t cbIpAdjust,
487 uint8_t bXcpt, uint16_t uHandlerCs)
488{
489 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
490 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
491 CHECK_MEMBER("bErrCd", "%#06RX64", pTrapCtx->uErrCd, 0);
492 CHECK_MEMBER("uHandlerCs", "%#06x", pTrapCtx->uHandlerCs, uHandlerCs);
493 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbIpAdjust, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
494 if (Bs3TestSubErrorCount() != cErrorsBefore)
495 {
496 Bs3TrapPrintFrame(pTrapCtx);
497#if 1
498 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
499 Bs3TestPrintf("Halting in CompareTrapCtx2: bXcpt=%#x\n", bXcpt);
500 ASMHalt();
501#endif
502 }
503}
504#endif
505
506/**
507 * Compares a CPU trap.
508 */
509static void bs3CpuBasic2_CompareCpuTrapCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd,
510 uint8_t bXcpt, bool f486ResumeFlagHint, uint8_t cbIpAdjust)
511{
512 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
513 uint32_t fExtraEfl;
514
515 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
516 CHECK_MEMBER("bErrCd", "%#06RX16", (uint16_t)pTrapCtx->uErrCd, (uint16_t)uErrCd); /* 486 only writes a word */
517
518 if ( g_f16BitSys
519 || bXcpt == X86_XCPT_DB /* hack (10980xe)... */
520 || ( !f486ResumeFlagHint
521 && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) <= BS3CPU_80486 ) )
522 fExtraEfl = 0;
523 else
524 fExtraEfl = X86_EFL_RF;
525#if 0 /** @todo Running on an AMD Phenom II X6 1100T under AMD-V I'm not getting good X86_EFL_RF results. Enable this to get on with other work. */
526 fExtraEfl = pTrapCtx->Ctx.rflags.u32 & X86_EFL_RF;
527#endif
528 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbIpAdjust, 0 /*cbSpAdjust*/, fExtraEfl, g_pszTestMode, g_usBs3TestStep);
529 if (Bs3TestSubErrorCount() != cErrorsBefore)
530 {
531 Bs3TrapPrintFrame(pTrapCtx);
532#if 1
533 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
534 Bs3TestPrintf("Halting: bXcpt=%#x uErrCd=%#x\n", bXcpt, uErrCd);
535 ASMHalt();
536#endif
537 }
538}
539
540
541/**
542 * Compares \#GP trap.
543 */
544static void bs3CpuBasic2_CompareGpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
545{
546 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_GP, true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
547}
548
549#if 0
550/**
551 * Compares \#NP trap.
552 */
553static void bs3CpuBasic2_CompareNpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
554{
555 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_NP, true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
556}
557#endif
558
559/**
560 * Compares \#SS trap.
561 */
562static void bs3CpuBasic2_CompareSsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd, bool f486ResumeFlagHint)
563{
564 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_SS, f486ResumeFlagHint, 0 /*cbIpAdjust*/);
565}
566
567#if 0
568/**
569 * Compares \#TS trap.
570 */
571static void bs3CpuBasic2_CompareTsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
572{
573 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_TS, false /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
574}
575#endif
576
577/**
578 * Compares \#PF trap.
579 */
580static void bs3CpuBasic2_ComparePfCtx(PCBS3TRAPFRAME pTrapCtx, PBS3REGCTX pStartCtx, uint16_t uErrCd,
581 uint64_t uCr2Expected, uint8_t cbIpAdjust)
582{
583 uint64_t const uCr2Saved = pStartCtx->cr2.u;
584 pStartCtx->cr2.u = uCr2Expected;
585 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_PF, true /*f486ResumeFlagHint*/, cbIpAdjust);
586 pStartCtx->cr2.u = uCr2Saved;
587}
588
589/**
590 * Compares \#UD trap.
591 */
592static void bs3CpuBasic2_CompareUdCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx)
593{
594 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*no error code*/, X86_XCPT_UD,
595 true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
596}
597
598/**
599 * Compares \#AC trap.
600 */
601static void bs3CpuBasic2_CompareAcCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t cbIpAdjust)
602{
603 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*always zero*/, X86_XCPT_AC, true /*f486ResumeFlagHint*/, cbIpAdjust);
604}
605
606/**
607 * Compares \#DB trap.
608 */
609static void bs3CpuBasic2_CompareDbCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint32_t fDr6Expect)
610{
611 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
612 uint32_t const fDr6 = Bs3RegGetDr6();
613 fDr6Expect |= X86_DR6_RA1_MASK;
614 CHECK_MEMBER("dr6", "%#08RX32", fDr6, fDr6Expect);
615
616 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*always zero*/, X86_XCPT_DB, false /*f486ResumeFlagHint?*/, 0 /*cbIpAdjust*/);
617
618 if (Bs3TestSubErrorCount() > cErrorsBefore)
619 {
620#if 0
621 Bs3TestPrintf("Halting\n");
622 ASMHalt();
623#endif
624 }
625}
626
627
628/**
629 * Checks that DR6 has the initial value, i.e. is unchanged when another exception
630 * was raised before a \#DB could occur.
631 */
632static void bs3CpuBasic2_CheckDr6InitVal(void)
633{
634 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
635 uint32_t const fDr6 = Bs3RegGetDr6();
636 uint32_t const fDr6Expect = X86_DR6_INIT_VAL;
637 CHECK_MEMBER("dr6", "%#08RX32", fDr6, fDr6Expect);
638 if (Bs3TestSubErrorCount() > cErrorsBefore)
639 {
640 Bs3TestPrintf("Halting\n");
641 ASMHalt();
642 }
643}
644
645#if 0 /* convert me */
646static void bs3CpuBasic2_RaiseXcpt1Common(uint16_t const uSysR0Cs, uint16_t const uSysR0CsConf, uint16_t const uSysR0Ss,
647 PX86DESC const paIdt, unsigned const cIdteShift)
648{
649 BS3TRAPFRAME TrapCtx;
650 BS3REGCTX Ctx80;
651 BS3REGCTX Ctx81;
652 BS3REGCTX Ctx82;
653 BS3REGCTX Ctx83;
654 BS3REGCTX CtxTmp;
655 BS3REGCTX CtxTmp2;
656 PBS3REGCTX apCtx8x[4];
657 unsigned iCtx;
658 unsigned iRing;
659 unsigned iDpl;
660 unsigned iRpl;
661 unsigned i, j, k;
662 uint32_t uExpected;
663 bool const f486Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486;
664# if TMPL_BITS == 16
665 bool const f386Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386;
666 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
667# else
668 bool const f286 = false;
669 bool const f386Plus = true;
670 int rc;
671 uint8_t *pbIdtCopyAlloc;
672 PX86DESC pIdtCopy;
673 const unsigned cbIdte = 1 << (3 + cIdteShift);
674 RTCCUINTXREG uCr0Saved = ASMGetCR0();
675 RTGDTR GdtrSaved;
676# endif
677 RTIDTR IdtrSaved;
678 RTIDTR Idtr;
679
680 ASMGetIDTR(&IdtrSaved);
681# if TMPL_BITS != 16
682 ASMGetGDTR(&GdtrSaved);
683# endif
684
685 /* make sure they're allocated */
686 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
687 Bs3MemZero(&Ctx80, sizeof(Ctx80));
688 Bs3MemZero(&Ctx81, sizeof(Ctx81));
689 Bs3MemZero(&Ctx82, sizeof(Ctx82));
690 Bs3MemZero(&Ctx83, sizeof(Ctx83));
691 Bs3MemZero(&CtxTmp, sizeof(CtxTmp));
692 Bs3MemZero(&CtxTmp2, sizeof(CtxTmp2));
693
694 /* Context array. */
695 apCtx8x[0] = &Ctx80;
696 apCtx8x[1] = &Ctx81;
697 apCtx8x[2] = &Ctx82;
698 apCtx8x[3] = &Ctx83;
699
700# if TMPL_BITS != 16
701 /* Allocate memory for playing around with the IDT. */
702 pbIdtCopyAlloc = NULL;
703 if (BS3_MODE_IS_PAGED(g_bTestMode))
704 pbIdtCopyAlloc = Bs3MemAlloc(BS3MEMKIND_FLAT32, 12*_1K);
705# endif
706
707 /*
708 * IDT entries 80 thru 83 are assigned DPLs according to their numbers.
709 * (We'll be using more, but this'll do for now.)
710 */
711 paIdt[0x80 << cIdteShift].Gate.u2Dpl = 0;
712 paIdt[0x81 << cIdteShift].Gate.u2Dpl = 1;
713 paIdt[0x82 << cIdteShift].Gate.u2Dpl = 2;
714 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
715
716 Bs3RegCtxSave(&Ctx80);
717 Ctx80.rsp.u -= 0x300;
718 Ctx80.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int80);
719# if TMPL_BITS == 16
720 Ctx80.cs = BS3_MODE_IS_RM_OR_V86(g_bTestMode) ? BS3_SEL_TEXT16 : BS3_SEL_R0_CS16;
721# elif TMPL_BITS == 32
722 g_uBs3TrapEipHint = Ctx80.rip.u32;
723# endif
724 Bs3MemCpy(&Ctx81, &Ctx80, sizeof(Ctx80));
725 Ctx81.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int81);
726 Bs3MemCpy(&Ctx82, &Ctx80, sizeof(Ctx80));
727 Ctx82.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int82);
728 Bs3MemCpy(&Ctx83, &Ctx80, sizeof(Ctx80));
729 Ctx83.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int83);
730
731 /*
732 * Check that all the above gates work from ring-0.
733 */
734 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
735 {
736 g_usBs3TestStep = iCtx;
737# if TMPL_BITS == 32
738 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
739# endif
740 Bs3TrapSetJmpAndRestore(apCtx8x[iCtx], &TrapCtx);
741 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, apCtx8x[iCtx], 0x80+iCtx /*bXcpt*/);
742 }
743
744 /*
745 * Check that the gate DPL checks work.
746 */
747 g_usBs3TestStep = 100;
748 for (iRing = 0; iRing <= 3; iRing++)
749 {
750 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
751 {
752 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
753 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
754# if TMPL_BITS == 32
755 g_uBs3TrapEipHint = CtxTmp.rip.u32;
756# endif
757 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
758 if (iCtx < iRing)
759 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
760 else
761 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
762 g_usBs3TestStep++;
763 }
764 }
765
766 /*
767 * Modify the gate CS value and run the handler at a different CPL.
768 * Throw RPL variations into the mix (completely ignored) together
769 * with gate presence.
770 * 1. CPL <= GATE.DPL
771 * 2. GATE.P
772 * 3. GATE.CS.DPL <= CPL (non-conforming segments)
773 */
774 g_usBs3TestStep = 1000;
775 for (i = 0; i <= 3; i++)
776 {
777 for (iRing = 0; iRing <= 3; iRing++)
778 {
779 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
780 {
781# if TMPL_BITS == 32
782 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
783# endif
784 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
785 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
786
787 for (j = 0; j <= 3; j++)
788 {
789 uint16_t const uCs = (uSysR0Cs | j) + (i << BS3_SEL_RING_SHIFT);
790 for (k = 0; k < 2; k++)
791 {
792 g_usBs3TestStep++;
793 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
794 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
795 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = k;
796 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
797 /*Bs3TrapPrintFrame(&TrapCtx);*/
798 if (iCtx < iRing)
799 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
800 else if (k == 0)
801 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
802 else if (i > iRing)
803 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
804 else
805 {
806 uint16_t uExpectedCs = uCs & X86_SEL_MASK_OFF_RPL;
807 if (i <= iCtx && i <= iRing)
808 uExpectedCs |= i;
809 bs3CpuBasic2_CompareTrapCtx2(&TrapCtx, &CtxTmp, 2 /*int 8xh*/, 0x80 + iCtx /*bXcpt*/, uExpectedCs);
810 }
811 }
812 }
813
814 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
815 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
816 }
817 }
818 }
819 BS3_ASSERT(g_usBs3TestStep < 1600);
820
821 /*
822 * Various CS and SS related faults
823 *
824 * We temporarily reconfigure gates 80 and 83 with new CS selectors, the
825 * latter having a CS.DPL of 2 for testing ring transitions and SS loading
826 * without making it impossible to handle faults.
827 */
828 g_usBs3TestStep = 1600;
829 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
830 Bs3GdteTestPage00.Gen.u1Present = 0;
831 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
832 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
833
834 /* CS.PRESENT = 0 */
835 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
836 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
837 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
838 bs3CpuBasic2_FailedF("selector was accessed");
839 g_usBs3TestStep++;
840
841 /* Check that GATE.DPL is checked before CS.PRESENT. */
842 for (iRing = 1; iRing < 4; iRing++)
843 {
844 Bs3MemCpy(&CtxTmp, &Ctx80, sizeof(CtxTmp));
845 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
846 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
847 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x80 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
848 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
849 bs3CpuBasic2_FailedF("selector was accessed");
850 g_usBs3TestStep++;
851 }
852
853 /* CS.DPL mismatch takes precedence over CS.PRESENT = 0. */
854 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
855 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
856 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
857 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
858 bs3CpuBasic2_FailedF("CS selector was accessed");
859 g_usBs3TestStep++;
860 for (iDpl = 1; iDpl < 4; iDpl++)
861 {
862 Bs3GdteTestPage00.Gen.u2Dpl = iDpl;
863 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
864 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
865 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
866 bs3CpuBasic2_FailedF("CS selector was accessed");
867 g_usBs3TestStep++;
868 }
869
870 /* 1608: Check all the invalid CS selector types alone. */
871 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
872 for (i = 0; i < RT_ELEMENTS(g_aInvalidCsTypes); i++)
873 {
874 Bs3GdteTestPage00.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
875 Bs3GdteTestPage00.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
876 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
877 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
878 if (Bs3GdteTestPage00.Gen.u4Type != g_aInvalidCsTypes[i].u4Type)
879 bs3CpuBasic2_FailedF("Invalid CS type %#x/%u -> %#x/%u\n",
880 g_aInvalidCsTypes[i].u4Type, g_aInvalidCsTypes[i].u1DescType,
881 Bs3GdteTestPage00.Gen.u4Type, Bs3GdteTestPage00.Gen.u1DescType);
882 g_usBs3TestStep++;
883
884 /* Incorrect CS.TYPE takes precedence over CS.PRESENT = 0. */
885 Bs3GdteTestPage00.Gen.u1Present = 0;
886 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
887 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
888 Bs3GdteTestPage00.Gen.u1Present = 1;
889 g_usBs3TestStep++;
890 }
891
892 /* Fix CS again. */
893 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
894
895 /* 1632: Test SS. */
896 if (!BS3_MODE_IS_64BIT_SYS(g_bTestMode))
897 {
898 uint16_t BS3_FAR *puTssSs2 = BS3_MODE_IS_16BIT_SYS(g_bTestMode) ? &Bs3Tss16.ss2 : &Bs3Tss32.ss2;
899 uint16_t const uSavedSs2 = *puTssSs2;
900 X86DESC const SavedGate83 = paIdt[0x83 << cIdteShift];
901
902 /* Make the handler execute in ring-2. */
903 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
904 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
905 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_02 | 2;
906
907 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
908 Bs3RegCtxConvertToRingX(&CtxTmp, 3); /* yeah, from 3 so SS:xSP is reloaded. */
909 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
910 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
911 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
912 bs3CpuBasic2_FailedF("CS selector was not accessed");
913 g_usBs3TestStep++;
914
915 /* Create an SS.DPL=2 stack segment and check that SS2.RPL matters and
916 that we get #SS if the selector isn't present. */
917 i = 0; /* used for cycling thru invalid CS types */
918 for (k = 0; k < 10; k++)
919 {
920 /* k=0: present,
921 k=1: not-present,
922 k=2: present but very low limit,
923 k=3: not-present, low limit.
924 k=4: present, read-only.
925 k=5: not-present, read-only.
926 k=6: present, code-selector.
927 k=7: not-present, code-selector.
928 k=8: present, read-write / no access + system (=LDT).
929 k=9: not-present, read-write / no access + system (=LDT).
930 */
931 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
932 Bs3GdteTestPage03.Gen.u1Present = !(k & 1);
933 if (k >= 8)
934 {
935 Bs3GdteTestPage03.Gen.u1DescType = 0; /* system */
936 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW; /* = LDT */
937 }
938 else if (k >= 6)
939 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_ER;
940 else if (k >= 4)
941 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RO;
942 else if (k >= 2)
943 {
944 Bs3GdteTestPage03.Gen.u16LimitLow = 0x400;
945 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
946 Bs3GdteTestPage03.Gen.u1Granularity = 0;
947 }
948
949 for (iDpl = 0; iDpl < 4; iDpl++)
950 {
951 Bs3GdteTestPage03.Gen.u2Dpl = iDpl;
952
953 for (iRpl = 0; iRpl < 4; iRpl++)
954 {
955 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | iRpl;
956 //Bs3TestPrintf("k=%u iDpl=%u iRpl=%u step=%u\n", k, iDpl, iRpl, g_usBs3TestStep);
957 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
958 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
959 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
960 if (iRpl != 2 || iRpl != iDpl || k >= 4)
961 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
962 else if (k != 0)
963 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03,
964 k == 2 /*f486ResumeFlagHint*/);
965 else
966 {
967 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
968 if (TrapCtx.uHandlerSs != (BS3_SEL_TEST_PAGE_03 | 2))
969 bs3CpuBasic2_FailedF("uHandlerSs=%#x expected %#x\n", TrapCtx.uHandlerSs, BS3_SEL_TEST_PAGE_03 | 2);
970 }
971 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
972 bs3CpuBasic2_FailedF("CS selector was not accessed");
973 if ( TrapCtx.bXcpt == 0x83
974 || (TrapCtx.bXcpt == X86_XCPT_SS && k == 2) )
975 {
976 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
977 bs3CpuBasic2_FailedF("SS selector was not accessed");
978 }
979 else if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
980 bs3CpuBasic2_FailedF("SS selector was accessed");
981 g_usBs3TestStep++;
982
983 /* +1: Modify the gate DPL to check that this is checked before SS.DPL and SS.PRESENT. */
984 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 2;
985 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
986 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x83 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
987 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
988 g_usBs3TestStep++;
989
990 /* +2: Check the CS.DPL check is done before the SS ones. Restoring the
991 ring-0 INT 83 context triggers the CS.DPL < CPL check. */
992 Bs3TrapSetJmpAndRestore(&Ctx83, &TrapCtx);
993 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx83, BS3_SEL_TEST_PAGE_02);
994 g_usBs3TestStep++;
995
996 /* +3: Now mark the CS selector not present and check that that also triggers before SS stuff. */
997 Bs3GdteTestPage02.Gen.u1Present = 0;
998 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
999 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
1000 Bs3GdteTestPage02.Gen.u1Present = 1;
1001 g_usBs3TestStep++;
1002
1003 /* +4: Make the CS selector some invalid type and check it triggers before SS stuff. */
1004 Bs3GdteTestPage02.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
1005 Bs3GdteTestPage02.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
1006 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1007 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
1008 Bs3GdteTestPage02.Gen.u4Type = X86_SEL_TYPE_ER_ACC;
1009 Bs3GdteTestPage02.Gen.u1DescType = 1;
1010 g_usBs3TestStep++;
1011
1012 /* +5: Now, make the CS selector limit too small and check that it triggers after the SS trouble.
1013 The 286 had a simpler approach to these GP(0). */
1014 Bs3GdteTestPage02.Gen.u16LimitLow = 0;
1015 Bs3GdteTestPage02.Gen.u4LimitHigh = 0;
1016 Bs3GdteTestPage02.Gen.u1Granularity = 0;
1017 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1018 if (f286)
1019 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
1020 else if (iRpl != 2 || iRpl != iDpl || k >= 4)
1021 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
1022 else if (k != 0)
1023 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, k == 2 /*f486ResumeFlagHint*/);
1024 else
1025 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
1026 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1027 g_usBs3TestStep++;
1028 }
1029 }
1030 }
1031
1032 /* Check all the invalid SS selector types alone. */
1033 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1034 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1035 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
1036 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1037 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
1038 g_usBs3TestStep++;
1039 for (i = 0; i < RT_ELEMENTS(g_aInvalidSsTypes); i++)
1040 {
1041 Bs3GdteTestPage03.Gen.u4Type = g_aInvalidSsTypes[i].u4Type;
1042 Bs3GdteTestPage03.Gen.u1DescType = g_aInvalidSsTypes[i].u1DescType;
1043 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1044 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
1045 if (Bs3GdteTestPage03.Gen.u4Type != g_aInvalidSsTypes[i].u4Type)
1046 bs3CpuBasic2_FailedF("Invalid SS type %#x/%u -> %#x/%u\n",
1047 g_aInvalidSsTypes[i].u4Type, g_aInvalidSsTypes[i].u1DescType,
1048 Bs3GdteTestPage03.Gen.u4Type, Bs3GdteTestPage03.Gen.u1DescType);
1049 g_usBs3TestStep++;
1050 }
1051
1052 /*
1053 * Continue the SS experiments with an expand down segment. We'll use
1054 * the same setup as we already have with gate 83h being DPL 3 and
1055 * having CS.DPL=2.
1056 *
1057 * Expand down segments are weird. The valid area is practically speaking
1058 * reversed. So, a 16-bit segment with a limit of 0x6000 will have valid
1059 * addresses from 0xffff thru 0x6001.
1060 *
1061 * So, with expand down segments we can more easily cut partially into the
1062 * pushing of the iret frame and trigger more interesting behavior than
1063 * with regular "expand up" segments where the whole pushing area is either
1064 * all fine or not fine.
1065 */
1066 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1067 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1068 Bs3GdteTestPage03.Gen.u2Dpl = 2;
1069 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW_DOWN;
1070 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
1071
1072 /* First test, limit = max --> no bytes accessible --> #SS */
1073 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1074 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
1075
1076 /* Second test, limit = 0 --> all but the zero byte accessible --> works */
1077 Bs3GdteTestPage03.Gen.u16LimitLow = 0;
1078 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
1079 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1080 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
1081
1082 /* Modify the gate handler to be a dummy that immediately does UD2
1083 and triggers #UD, then advance the limit down till we get the #UD. */
1084 Bs3GdteTestPage03.Gen.u1Granularity = 0;
1085
1086 Bs3MemCpy(&CtxTmp2, &CtxTmp, sizeof(CtxTmp2)); /* #UD result context */
1087 if (g_f16BitSys)
1088 {
1089 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr - BS3_ADDR_BS3TEXT16;
1090 Bs3Trap16SetGate(0x83, X86_SEL_TYPE_SYS_286_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u16, 0 /*cParams*/);
1091 CtxTmp2.rsp.u = Bs3Tss16.sp2 - 2*5;
1092 }
1093 else
1094 {
1095 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr;
1096 Bs3Trap32SetGate(0x83, X86_SEL_TYPE_SYS_386_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u32, 0 /*cParams*/);
1097 CtxTmp2.rsp.u = Bs3Tss32.esp2 - 4*5;
1098 }
1099 CtxTmp2.bMode = g_bTestMode; /* g_bBs3CurrentMode not changed by the UD2 handler. */
1100 CtxTmp2.cs = BS3_SEL_TEST_PAGE_02 | 2;
1101 CtxTmp2.ss = BS3_SEL_TEST_PAGE_03 | 2;
1102 CtxTmp2.bCpl = 2;
1103
1104 /* test run. */
1105 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1106 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1107 g_usBs3TestStep++;
1108
1109 /* Real run. */
1110 i = (g_f16BitSys ? 2 : 4) * 6 + 1;
1111 while (i-- > 0)
1112 {
1113 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
1114 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1115 if (i > 0)
1116 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
1117 else
1118 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1119 g_usBs3TestStep++;
1120 }
1121
1122 /* Do a run where we do the same-ring kind of access. */
1123 Bs3RegCtxConvertToRingX(&CtxTmp, 2);
1124 if (g_f16BitSys)
1125 {
1126 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 2*3;
1127 i = 2*3 - 1;
1128 }
1129 else
1130 {
1131 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 4*3;
1132 i = 4*3 - 1;
1133 }
1134 CtxTmp.ss = BS3_SEL_TEST_PAGE_03 | 2;
1135 CtxTmp2.ds = CtxTmp.ds;
1136 CtxTmp2.es = CtxTmp.es;
1137 CtxTmp2.fs = CtxTmp.fs;
1138 CtxTmp2.gs = CtxTmp.gs;
1139 while (i-- > 0)
1140 {
1141 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
1142 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1143 if (i > 0)
1144 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, 0 /*BS3_SEL_TEST_PAGE_03*/, true /*f486ResumeFlagHint*/);
1145 else
1146 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1147 g_usBs3TestStep++;
1148 }
1149
1150 *puTssSs2 = uSavedSs2;
1151 paIdt[0x83 << cIdteShift] = SavedGate83;
1152 }
1153 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1154 BS3_ASSERT(g_usBs3TestStep < 3000);
1155
1156 /*
1157 * Modify the gate CS value with a conforming segment.
1158 */
1159 g_usBs3TestStep = 3000;
1160 for (i = 0; i <= 3; i++) /* cs.dpl */
1161 {
1162 for (iRing = 0; iRing <= 3; iRing++)
1163 {
1164 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1165 {
1166 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1167 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1168# if TMPL_BITS == 32
1169 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1170# endif
1171
1172 for (j = 0; j <= 3; j++) /* rpl */
1173 {
1174 uint16_t const uCs = (uSysR0CsConf | j) + (i << BS3_SEL_RING_SHIFT);
1175 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1176 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1177 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1178 //Bs3TestPrintf("%u/%u/%u/%u: cs=%04x hcs=%04x xcpt=%02x\n", i, iRing, iCtx, j, uCs, TrapCtx.uHandlerCs, TrapCtx.bXcpt);
1179 /*Bs3TrapPrintFrame(&TrapCtx);*/
1180 g_usBs3TestStep++;
1181 if (iCtx < iRing)
1182 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1183 else if (i > iRing)
1184 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1185 else
1186 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1187 }
1188 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1189 }
1190 }
1191 }
1192 BS3_ASSERT(g_usBs3TestStep < 3500);
1193
1194 /*
1195 * The gates must be 64-bit in long mode.
1196 */
1197 if (cIdteShift != 0)
1198 {
1199 g_usBs3TestStep = 3500;
1200 for (i = 0; i <= 3; i++)
1201 {
1202 for (iRing = 0; iRing <= 3; iRing++)
1203 {
1204 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1205 {
1206 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1207 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1208
1209 for (j = 0; j < 2; j++)
1210 {
1211 static const uint16_t s_auCSes[2] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32 };
1212 uint16_t uCs = (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT);
1213 g_usBs3TestStep++;
1214 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1215 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1216 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1217 /*Bs3TrapPrintFrame(&TrapCtx);*/
1218 if (iCtx < iRing)
1219 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1220 else
1221 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1222 }
1223 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1224 }
1225 }
1226 }
1227 BS3_ASSERT(g_usBs3TestStep < 4000);
1228 }
1229
1230 /*
1231 * IDT limit check. The 286 does not access X86DESCGATE::u16OffsetHigh.
1232 */
1233 g_usBs3TestStep = 5000;
1234 i = (0x80 << (cIdteShift + 3)) - 1;
1235 j = (0x82 << (cIdteShift + 3)) - (!f286 ? 1 : 3);
1236 k = (0x83 << (cIdteShift + 3)) - 1;
1237 for (; i <= k; i++, g_usBs3TestStep++)
1238 {
1239 Idtr = IdtrSaved;
1240 Idtr.cbIdt = i;
1241 ASMSetIDTR(&Idtr);
1242 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1243 if (i < j)
1244 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx81, (0x81 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1245 else
1246 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1247 }
1248 ASMSetIDTR(&IdtrSaved);
1249 BS3_ASSERT(g_usBs3TestStep < 5100);
1250
1251# if TMPL_BITS != 16 /* Only do the paging related stuff in 32-bit and 64-bit modes. */
1252
1253 /*
1254 * IDT page not present. Placing the IDT copy such that 0x80 is on the
1255 * first page and 0x81 is on the second page. We then proceed to move
1256 * it down byte by byte to check that any inaccessible byte means #PF.
1257 *
1258 * Note! We must reload the alternative IDTR for each run as any kind of
1259 * printing (like error reporting) will cause a switch
1260 * to real mode and back, reloading the default IDTR.
1261 */
1262 g_usBs3TestStep = 5200;
1263 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1264 {
1265 uint32_t const uCr2Expected = Bs3SelPtrToFlat(pbIdtCopyAlloc) + _4K;
1266 for (j = 0; j < cbIdte; j++)
1267 {
1268 pIdtCopy = (PX86DESC)&pbIdtCopyAlloc[_4K - cbIdte * 0x81 - j];
1269 Bs3MemCpy(pIdtCopy, paIdt, cbIdte * 256);
1270
1271 Idtr.cbIdt = IdtrSaved.cbIdt;
1272 Idtr.pIdt = Bs3SelPtrToFlat(pIdtCopy);
1273
1274 ASMSetIDTR(&Idtr);
1275 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1276 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1277 g_usBs3TestStep++;
1278
1279 ASMSetIDTR(&Idtr);
1280 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1281 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1282 g_usBs3TestStep++;
1283
1284 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1285 if (RT_SUCCESS(rc))
1286 {
1287 ASMSetIDTR(&Idtr);
1288 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1289 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1290 g_usBs3TestStep++;
1291
1292 ASMSetIDTR(&Idtr);
1293 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1294 if (f486Plus)
1295 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1296 else
1297 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1298 g_usBs3TestStep++;
1299
1300 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1301
1302 /* Check that the entry type is checked after the whole IDTE has been cleared for #PF. */
1303 pIdtCopy[0x80 << cIdteShift].Gate.u4Type = 0;
1304 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1305 if (RT_SUCCESS(rc))
1306 {
1307 ASMSetIDTR(&Idtr);
1308 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1309 if (f486Plus)
1310 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1311 else
1312 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1313 g_usBs3TestStep++;
1314
1315 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1316 }
1317 }
1318 else
1319 Bs3TestPrintf("Bs3PagingProtectPtr: %d\n", i);
1320
1321 ASMSetIDTR(&IdtrSaved);
1322 }
1323 }
1324
1325 /*
1326 * The read/write and user/supervisor bits of the IDT PTEs are irrelevant.
1327 */
1328 g_usBs3TestStep = 5300;
1329 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1330 {
1331 Bs3MemCpy(pbIdtCopyAlloc, paIdt, cbIdte * 256);
1332 Idtr.cbIdt = IdtrSaved.cbIdt;
1333 Idtr.pIdt = Bs3SelPtrToFlat(pbIdtCopyAlloc);
1334
1335 ASMSetIDTR(&Idtr);
1336 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1337 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1338 g_usBs3TestStep++;
1339
1340 rc = Bs3PagingProtect(Idtr.pIdt, _4K, 0 /*fSet*/, X86_PTE_RW | X86_PTE_US /*fClear*/);
1341 if (RT_SUCCESS(rc))
1342 {
1343 ASMSetIDTR(&Idtr);
1344 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1345 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1346 g_usBs3TestStep++;
1347
1348 Bs3PagingProtect(Idtr.pIdt, _4K, X86_PTE_RW | X86_PTE_US /*fSet*/, 0 /*fClear*/);
1349 }
1350 ASMSetIDTR(&IdtrSaved);
1351 }
1352
1353 /*
1354 * Check that CS.u1Accessed is set to 1. Use the test page selectors #0 and #3 together
1355 * with interrupt gates 80h and 83h, respectively.
1356 */
1357/** @todo Throw in SS.u1Accessed too. */
1358 g_usBs3TestStep = 5400;
1359 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1360 {
1361 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
1362 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1363 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
1364
1365 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Cs + (3 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1366 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1367 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_03; /* rpl is ignored, so leave it as zero. */
1368
1369 /* Check that the CS.A bit is being set on a general basis and that
1370 the special CS values work with our generic handler code. */
1371 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1372 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1373 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1374 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed", Bs3GdteTestPage00.Gen.u4Type);
1375 g_usBs3TestStep++;
1376
1377 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1378 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1379 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1380 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1381 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1382             bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage03.Gen.u4Type);
1383 if (TrapCtx.uHandlerCs != (BS3_SEL_TEST_PAGE_03 | 3))
1384 bs3CpuBasic2_FailedF("uHandlerCs=%#x, expected %#x", TrapCtx.uHandlerCs, (BS3_SEL_TEST_PAGE_03 | 3));
1385 g_usBs3TestStep++;
1386
1387 /*
1388          * Now check that setting CS.u1Accessed to 1 does __NOT__ trigger a page
1389          * fault due to the RW bit being zero.
1390          * (We check both with and without the WP bit on 80486 and later.)
1391 */
1392 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1393 ASMSetCR0(uCr0Saved | X86_CR0_WP);
1394
1395 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1396 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1397 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_RW /*fClear*/);
1398 if (RT_SUCCESS(rc))
1399 {
1400 /* ring-0 handler */
1401 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1402 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1403 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1404 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1405 g_usBs3TestStep++;
1406
1407 /* ring-3 handler */
1408 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1409 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1410 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1411 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1412 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1413                 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage03.Gen.u4Type);
1414 g_usBs3TestStep++;
1415
1416 /* clear WP and repeat the above. */
1417 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1418 ASMSetCR0(uCr0Saved & ~X86_CR0_WP);
1419 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1420 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1421
1422 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1423 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1424 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1425 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1426 g_usBs3TestStep++;
1427
1428 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1429 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1430 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1431                 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage03.Gen.u4Type);
1432 g_usBs3TestStep++;
1433
1434 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_RW /*fSet*/, 0 /*fClear*/);
1435 }
1436
1437 ASMSetCR0(uCr0Saved);
1438
1439 /*
1440 * While we're here, check that if the CS GDT entry is a non-present
1441          * page we do get a #PF with the right error code and CR2.
1442 */
1443 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* Just for fun, really a pointless gesture. */
1444 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1445 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1446 if (RT_SUCCESS(rc))
1447 {
1448 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1449 if (f486Plus)
1450 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00);
1451 else
1452 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00 + 4);
1453 g_usBs3TestStep++;
1454
1455             /* Do it from ring-3 to check the ErrCd, which, it turns out, doesn't get X86_TRAP_PF_US set. */
1456 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1457 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1458 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1459
1460 if (f486Plus)
1461 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03);
1462 else
1463 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03 + 4);
1464 g_usBs3TestStep++;
1465
1466 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1467 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1468 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #1", Bs3GdteTestPage00.Gen.u4Type);
1469 if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1470 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #2", Bs3GdteTestPage03.Gen.u4Type);
1471 }
1472
1473 /* restore */
1474 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1475 paIdt[0x83 << cIdteShift].Gate.u16Sel = uSysR0Cs;// + (3 << BS3_SEL_RING_SHIFT) + 3;
1476 }
1477
1478# endif /* 32 || 64*/
1479
1480 /*
1481 * Check broad EFLAGS effects.
1482 */
1483 g_usBs3TestStep = 5600;
1484 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1485 {
1486 for (iRing = 0; iRing < 4; iRing++)
1487 {
1488 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1489 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1490
1491 /* all set */
1492 CtxTmp.rflags.u32 &= X86_EFL_VM | X86_EFL_1;
1493 CtxTmp.rflags.u32 |= X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF /* | X86_EFL_TF */ /*| X86_EFL_IF*/
1494 | X86_EFL_DF | X86_EFL_OF | X86_EFL_IOPL /* | X86_EFL_NT*/;
1495 if (f486Plus)
1496 CtxTmp.rflags.u32 |= X86_EFL_AC;
1497 if (f486Plus && !g_f16BitSys)
1498 CtxTmp.rflags.u32 |= X86_EFL_RF;
1499 if (g_uBs3CpuDetected & BS3CPU_F_CPUID)
1500 CtxTmp.rflags.u32 |= X86_EFL_VIF | X86_EFL_VIP;
1501 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1502 CtxTmp.rflags.u32 &= ~X86_EFL_RF;
1503
1504 if (iCtx >= iRing)
1505 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1506 else
1507 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
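            /* Build the EFLAGS value the handler should have seen: everything we
               set is expected to survive into the handler except RF, TF and IF,
               which the mask below excludes. */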
1508 uExpected = CtxTmp.rflags.u32
1509 & ( X86_EFL_1 | X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_DF
1510 | X86_EFL_OF | X86_EFL_IOPL | X86_EFL_NT | X86_EFL_VM | X86_EFL_AC | X86_EFL_VIF | X86_EFL_VIP
1511 | X86_EFL_ID /*| X86_EFL_TF*/ /*| X86_EFL_IF*/ /*| X86_EFL_RF*/ );
1512 if (TrapCtx.fHandlerRfl != uExpected)
1513 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1514 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1515 g_usBs3TestStep++;
1516
1517 /* all cleared */
1518 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80286)
1519 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_RA1_MASK | UINT16_C(0xf000));
1520 else
1521 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_VM | X86_EFL_RA1_MASK);
1522 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1523 if (iCtx >= iRing)
1524 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1525 else
1526 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1527 uExpected = CtxTmp.rflags.u32;
1528 if (TrapCtx.fHandlerRfl != uExpected)
1529 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1530 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1531 g_usBs3TestStep++;
1532 }
1533 }
1534
1535/** @todo CS.LIMIT / canonical(CS) */
1536
1537
1538 /*
1539 * Check invalid gate types.
1540 */
1541 g_usBs3TestStep = 32000;
1542 for (iRing = 0; iRing <= 3; iRing++)
1543 {
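        /* Selectors to stuff into the gates, plus the gate type values that must
           yield #GP: s_auInvlTypes64 covers long mode IDTEs, s_auInvlTypes32 the
           16/32-bit ones (its last three entries are 386 gate types, which are
           only invalid on a 286, see cInvTypes below). */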
1544 static const uint16_t s_auCSes[] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32, BS3_SEL_R0_CS64,
1545 BS3_SEL_TSS16, BS3_SEL_TSS32, BS3_SEL_TSS64, 0, BS3_SEL_SPARE_1f };
1546 static uint16_t const s_auInvlTypes64[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13,
1547 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1548 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f };
1549 static uint16_t const s_auInvlTypes32[] = { 0, 1, 2, 3, 8, 9, 10, 11, 13,
1550 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1551 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
1552 /*286:*/ 12, 14, 15 };
1553 uint16_t const * const pauInvTypes = cIdteShift != 0 ? s_auInvlTypes64 : s_auInvlTypes32;
1554 uint16_t const cInvTypes = cIdteShift != 0 ? RT_ELEMENTS(s_auInvlTypes64)
1555 : f386Plus ? RT_ELEMENTS(s_auInvlTypes32) - 3 : RT_ELEMENTS(s_auInvlTypes32);
1556
1557
1558 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1559 {
1560 unsigned iType;
1561
1562 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1563 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1564# if TMPL_BITS == 32
1565 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1566# endif
1567 for (iType = 0; iType < cInvTypes; iType++)
1568 {
1569 uint8_t const bSavedType = paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type;
1570 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = pauInvTypes[iType] >> 4;
1571 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = pauInvTypes[iType] & 0xf;
1572
1573 for (i = 0; i < 4; i++)
1574 {
1575 for (j = 0; j < RT_ELEMENTS(s_auCSes); j++)
1576 {
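                    /* For selectors in the BS3_SEL_R0_XXX range, pick the ring-i
                       variant and set RPL to i; for the TSS, NULL and spare
                       selectors just apply the RPL. */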
1577 uint16_t uCs = (unsigned)(s_auCSes[j] - BS3_SEL_R0_FIRST) < (unsigned)(4 << BS3_SEL_RING_SHIFT)
1578 ? (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT)
1579 : s_auCSes[j] | i;
1580 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x type=%#x\n", g_usBs3TestStep, iCtx, iRing, i, uCs, pauInvTypes[iType]);*/
1581 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1582 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1583 g_usBs3TestStep++;
1584 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1585
1586 /* Mark it not-present to check that invalid type takes precedence. */
1587 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 0;
1588 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1589 g_usBs3TestStep++;
1590 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1591 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1592 }
1593 }
1594
1595 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1596 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = bSavedType;
1597 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = 0;
1598 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1599 }
1600 }
1601 }
1602 BS3_ASSERT(g_usBs3TestStep < 62000U && g_usBs3TestStep > 32000U);
1603
1604
1605 /** @todo
1606 * - Run \#PF and \#GP (and others?) at CPLs other than zero.
1607 * - Quickly generate all faults.
1608  * - All the v8086 mode peculiarities.
1609 */
1610
1611# if TMPL_BITS != 16
1612 Bs3MemFree(pbIdtCopyAlloc, 12*_1K);
1613# endif
1614}
1615#endif /* convert me */
1616
1617
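/**
 * Worker for bs3CpuBasic2_RaiseXcpt11 that runs the #AC test code snippets.
 *
 * @param   bMode           The CPU mode being tested.
 * @param   pbBuf           The buffer the memory operands are pointed at.
 * @param   cbCacheLine     The assumed cache line size (for the split-lock coverage).
 * @param   fAm             Whether CR0.AM is set.
 * @param   fPf             Whether pbBuf is an alias that ring-3 cannot access,
 *                          so ring-3 accesses are expected to #PF.
 * @param   uFlatBufPtr     The flat address of the buffer (for CR2 checks).
 * @param   pCmn            The per-mode table of test code snippets.
 */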
1618static void bs3CpuBasic2_RaiseXcpt11Worker(uint8_t bMode, uint8_t *pbBuf, unsigned cbCacheLine, bool fAm, bool fPf,
1619 RTCCUINTXREG uFlatBufPtr, BS3CPUBASIC2PFTTSTCMNMODE const BS3_FAR *pCmn)
1620{
1621 BS3TRAPFRAME TrapCtx;
1622 BS3REGCTX Ctx;
1623 BS3REGCTX CtxUdExpected;
1624 uint8_t const cRings = bMode == BS3_MODE_RM ? 1 : 4;
1625 uint8_t iRing;
1626 uint16_t iTest;
1627
1628 /* make sure they're allocated */
1629 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
1630 Bs3MemZero(&Ctx, sizeof(Ctx));
1631 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
1632
1633 /*
1634 * Test all relevant rings.
1635 *
1636 * The memory operand is ds:xBX, so point it to pbBuf.
1637 * The test snippets mostly use xAX as operand, with the div
1638 * one also using xDX, so make sure they make some sense.
1639 */
1640 Bs3RegCtxSaveEx(&Ctx, bMode, 512);
1641
1642 Ctx.cr0.u32 &= ~(X86_CR0_MP | X86_CR0_EM | X86_CR0_TS); /* so fninit + fld works */
1643
1644 for (iRing = BS3_MODE_IS_V86(bMode) ? 3 : 0; iRing < cRings; iRing++)
1645 {
1646 uint32_t uEbx;
1647 uint8_t fAc;
1648
1649 if (!BS3_MODE_IS_RM_OR_V86(bMode))
1650 Bs3RegCtxConvertToRingX(&Ctx, iRing);
1651
1652 if (!fPf || BS3_MODE_IS_32BIT_CODE(bMode) || BS3_MODE_IS_64BIT_CODE(bMode))
1653 Bs3RegCtxSetGrpDsFromCurPtr(&Ctx, &Ctx.rbx, pbBuf);
1654 else
1655 {
1656 /* Bs3RegCtxSetGrpDsFromCurPtr barfs when trying to output a sel:off address for the aliased buffer. */
1657 Ctx.ds = BS3_FP_SEG(pbBuf);
1658 Ctx.rbx.u32 = BS3_FP_OFF(pbBuf);
1659 }
1660 uEbx = Ctx.rbx.u32;
1661
1662 Ctx.rax.u = (bMode & BS3_MODE_CODE_MASK) == BS3_MODE_CODE_64
1663 ? UINT64_C(0x80868028680386fe) : UINT32_C(0x65020686);
1664 Ctx.rdx.u = UINT32_C(0x00100100); /* careful with range due to div */
1665
1666 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
1667
1668 /*
1669 * AC flag loop.
1670 */
1671 for (fAc = 0; fAc < 2; fAc++)
1672 {
1673 if (fAc)
1674 Ctx.rflags.u32 |= X86_EFL_AC;
1675 else
1676 Ctx.rflags.u32 &= ~X86_EFL_AC;
1677
1678 /*
1679 * Loop over the test snippets.
1680 */
1681 for (iTest = 0; iTest < pCmn->cEntries; iTest++)
1682 {
1683 uint8_t const fOp = pCmn->paEntries[iTest].fOp;
1684 uint16_t const cbMem = pCmn->paEntries[iTest].cbMem;
1685 uint8_t const cbAlign = pCmn->paEntries[iTest].cbAlign;
1686 uint16_t const cbMax = cbCacheLine + cbMem;
1687 uint16_t offMem;
1688 uint8_t BS3_FAR *poffUd = (uint8_t BS3_FAR *)Bs3SelLnkPtrToCurPtr(pCmn->paEntries[iTest].pfn);
1689 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pCmn->paEntries[iTest].pfn);
1690 CtxUdExpected.rip = Ctx.rip;
1691 CtxUdExpected.rip.u = Ctx.rip.u + poffUd[-1];
1692 CtxUdExpected.cs = Ctx.cs;
1693 CtxUdExpected.rflags = Ctx.rflags;
1694 if (bMode == BS3_MODE_RM)
1695 CtxUdExpected.rflags.u32 &= ~X86_EFL_AC; /** @todo investigate. automatically cleared, or is it just our code? Observed with bs3-cpu-instr-3 too (10980xe), seems to be the CPU doing it. */
1696 CtxUdExpected.rdx = Ctx.rdx;
1697 CtxUdExpected.rax = Ctx.rax;
1698 if (fOp & MYOP_LD)
1699 {
1700 switch (cbMem)
1701 {
1702 case 2:
1703 CtxUdExpected.rax.u16 = 0x0101;
1704 break;
1705 case 4:
1706 CtxUdExpected.rax.u32 = UINT32_C(0x01010101);
1707 break;
1708 case 8:
1709 CtxUdExpected.rax.u64 = UINT64_C(0x0101010101010101);
1710 break;
1711 }
1712 }
1713
1714 /*
1715 * Buffer misalignment loop.
1716                  * Note! We must make sure to cross a cache line here in order
1717 * to cover the split-lock scenario. (The buffer is cache
1718 * line aligned.)
1719 */
1720 for (offMem = 0; offMem < cbMax; offMem++)
1721 {
1722 bool const fMisaligned = (offMem & (cbAlign - 1)) != 0;
1723 unsigned offBuf = cbMax + cbMem * 2;
1724 while (offBuf-- > 0)
1725 pbBuf[offBuf] = 1; /* byte-by-byte to make sure it doesn't trigger AC. */
1726
1727 CtxUdExpected.rbx.u32 = Ctx.rbx.u32 = uEbx + offMem; /* ASSUMES memory in first 4GB. */
1728 if (BS3_MODE_IS_16BIT_SYS(bMode))
1729 g_uBs3TrapEipHint = Ctx.rip.u32;
1730
1731 //Bs3TestPrintf("iRing=%d iTest=%d cs:rip=%04RX16:%08RX32 ds:rbx=%04RX16:%08RX32 ss:esp=%04RX16:%08RX32 bXcpt=%#x errcd=%#x fAm=%d fAc=%d ESP=%#RX32\n",
1732 // iRing, iTest, Ctx.cs, Ctx.rip.u32, Ctx.ds, Ctx.rbx.u32, Ctx.ss, Ctx.rsp.u32, TrapCtx.bXcpt, (unsigned)TrapCtx.uErrCd, fAm, fAc, ASMGetESP());
1733
1734 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1735
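                    /* Figure out what to expect (roughly; see the conditions below):
                       #GP for misaligned MYOP_AC_GP (FXSAVE style) instructions,
                       #PF for ring-3 accesses via the inaccessible alias, #AC for
                       other misaligned ring-3 accesses with CR0.AM and EFLAGS.AC
                       set, and otherwise the UD2 at the end of the snippet. */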
1736 if ( (pCmn->paEntries[iTest].fOp & MYOP_AC_GP)
1737 && fMisaligned
1738 && (!fAm || iRing != 3 || !fAc || (offMem & 3 /* 10980XE */) == 0) )
1739 {
1740 if (fAc && bMode == BS3_MODE_RM)
1741 TrapCtx.Ctx.rflags.u32 |= X86_EFL_AC;
1742 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
1743 }
1744 else if (fPf && iRing == 3 && (!fAm || !fAc || !fMisaligned)) /* #AC beats #PF */
1745 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx,
1746 X86_TRAP_PF_P | X86_TRAP_PF_US
1747 | (pCmn->paEntries[iTest].fOp & MYOP_ST ? X86_TRAP_PF_RW : 0),
1748 uFlatBufPtr + offMem + (cbMem > 64 ? cbMem - 1 /*FXSAVE*/ : 0),
1749 pCmn->paEntries[iTest].offFaultInstr);
1750 else if (!fAm || iRing != 3 || !fAc || !fMisaligned)
1751 {
1752 if (fOp & MYOP_EFL)
1753 {
1754 CtxUdExpected.rflags.u16 &= ~X86_EFL_STATUS_BITS;
1755 CtxUdExpected.rflags.u16 |= TrapCtx.Ctx.rflags.u16 & X86_EFL_STATUS_BITS;
1756 }
1757 if (fOp == MYOP_LD_DIV)
1758 {
1759 CtxUdExpected.rax = TrapCtx.Ctx.rax;
1760 CtxUdExpected.rdx = TrapCtx.Ctx.rdx;
1761 }
1762 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1763 }
1764 else
1765 bs3CpuBasic2_CompareAcCtx(&TrapCtx, &Ctx, pCmn->paEntries[iTest].offFaultInstr);
1766
1767 g_usBs3TestStep++;
1768 }
1769 }
1770 }
1771 }
1772}
1773
1774
1775/**
1776 * Entrypoint for \#AC tests.
1777 *
1778 * @returns 0 or BS3TESTDOMODE_SKIPPED.
1779 * @param bMode The CPU mode we're testing.
1780 *
1781 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
1782 * with control registers and such.
1783 */
1784BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_RaiseXcpt11)(uint8_t bMode)
1785{
1786 unsigned cbCacheLine = 128; /** @todo detect */
1787 uint8_t BS3_FAR *pbBufAlloc;
1788 uint8_t BS3_FAR *pbBuf;
1789 unsigned idxCmnModes;
1790 uint32_t fCr0;
1791
1792 /*
1793 * Skip if 386 or older.
1794 */
1795 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80486)
1796 {
1797 Bs3TestSkipped("#AC test requires 486 or later");
1798 return BS3TESTDOMODE_SKIPPED;
1799 }
1800
1801 bs3CpuBasic2_SetGlobals(bMode);
1802
1803     /* Get us a page aligned (and thus cache-line aligned) buffer. */
1804 pbBufAlloc = pbBuf = Bs3MemAllocZ(BS3_MODE_IS_RM_OR_V86(bMode) ? BS3MEMKIND_REAL : BS3MEMKIND_TILED, X86_PAGE_SIZE * 2);
1805 if (!pbBufAlloc)
1806 return Bs3TestFailed("Failed to allocate 2 pages of real-mode memory");
1807 if (BS3_FP_OFF(pbBuf) & (X86_PAGE_SIZE - 1))
1808 pbBuf = &pbBufAlloc[X86_PAGE_SIZE - (BS3_FP_OFF(pbBuf) & X86_PAGE_OFFSET_MASK)];
1809 BS3_ASSERT(pbBuf - pbBufAlloc <= X86_PAGE_SIZE);
1810 //Bs3TestPrintf("pbBuf=%p\n", pbBuf);
1811
1812 /* Find the g_aCmnModes entry. */
1813 idxCmnModes = 0;
1814 while (g_aCmnModes[idxCmnModes].bMode != (bMode & BS3_MODE_CODE_MASK))
1815 idxCmnModes++;
1816 //Bs3TestPrintf("idxCmnModes=%d bMode=%#x\n", idxCmnModes, bMode);
1817
1818 /* First round is w/o alignment checks enabled. */
1819 //Bs3TestPrintf("round 1\n");
1820 fCr0 = Bs3RegGetCr0();
1821 BS3_ASSERT(!(fCr0 & X86_CR0_AM));
1822 Bs3RegSetCr0(fCr0 & ~X86_CR0_AM);
1823#if 1
1824 bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBuf, cbCacheLine, false /*fAm*/, false /*fPf*/, 0, &g_aCmnModes[idxCmnModes]);
1825#endif
1826
1827     /* The second round is with alignment checks enabled. */
1828#if 1
1829 //Bs3TestPrintf("round 2\n");
1830 Bs3RegSetCr0(Bs3RegGetCr0() | X86_CR0_AM);
1831 bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBuf, cbCacheLine, true /*fAm*/, false /*fPf*/, 0, &g_aCmnModes[idxCmnModes]);
1832#endif
1833
1834#if 1
1835     /* The third and fourth rounds access the buffer via a page alias that's not
1836        accessible from ring-3.  The third round has alignment checks disabled and
1837        the fourth has them enabled. */
1838 if (BS3_MODE_IS_PAGED(bMode) && !BS3_MODE_IS_V86(bMode))
1839 {
1840 /* Alias the buffer as system memory so ring-3 access with AC+AM will cause #PF: */
1841 /** @todo the aliasing is not necessary any more... */
1842 int rc;
1843 RTCCUINTXREG uFlatBufPtr = Bs3SelPtrToFlat(pbBuf);
1844 uint64_t const uAliasPgPtr = bMode & BS3_MODE_CODE_64 ? UINT64_C(0x0000648680000000) : UINT32_C(0x80000000);
1845 rc = Bs3PagingAlias(uAliasPgPtr, uFlatBufPtr & ~(uint64_t)X86_PAGE_OFFSET_MASK, X86_PAGE_SIZE * 2,
1846 X86_PTE_P | X86_PTE_RW);
1847 if (RT_SUCCESS(rc))
1848 {
1849 /* We 'misalign' the segment base here to make sure it's the final
1850 address that gets alignment checked and not just the operand value. */
1851 RTCCUINTXREG uAliasBufPtr = (RTCCUINTXREG)uAliasPgPtr + (uFlatBufPtr & X86_PAGE_OFFSET_MASK);
1852 uint8_t BS3_FAR *pbBufAlias = BS3_FP_MAKE(BS3_SEL_SPARE_00 | 3, (uFlatBufPtr & X86_PAGE_OFFSET_MASK) + 1);
1853 Bs3SelSetup16BitData(&Bs3GdteSpare00, uAliasPgPtr - 1);
1854
1855 //Bs3TestPrintf("round 3 pbBufAlias=%p\n", pbBufAlias);
1856 Bs3RegSetCr0(Bs3RegGetCr0() & ~X86_CR0_AM);
1857 bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBufAlias, cbCacheLine, false /*fAm*/,
1858 true /*fPf*/, uAliasBufPtr, &g_aCmnModes[idxCmnModes]);
1859
1860 //Bs3TestPrintf("round 4\n");
1861 Bs3RegSetCr0(Bs3RegGetCr0() | X86_CR0_AM);
1862 bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBufAlias, cbCacheLine, true /*fAm*/,
1863 true /*fPf*/, uAliasBufPtr, &g_aCmnModes[idxCmnModes]);
1864
1865 Bs3PagingUnalias(uAliasPgPtr, X86_PAGE_SIZE * 2);
1866 }
1867 else
1868 Bs3TestFailedF("Bs3PagingAlias failed with %Rrc", rc);
1869 }
1870#endif
1871
1872 Bs3MemFree(pbBufAlloc, X86_PAGE_SIZE * 2);
1873 Bs3RegSetCr0(fCr0);
1874 return 0;
1875}
1876
1877
1878/**
1879 * Executes one round of SIDT and SGDT tests using one assembly worker.
1880 *
1881 * This is written with driving everything from the 16-bit or 32-bit worker in
1882  * mind, i.e. not assuming the test bitcount is the same as the current one.
1883 */
1884static void bs3CpuBasic2_sidt_sgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
1885 uint8_t const *pbExpected)
1886{
1887 BS3TRAPFRAME TrapCtx;
1888 BS3REGCTX Ctx;
1889 BS3REGCTX CtxUdExpected;
1890 BS3REGCTX TmpCtx;
1891 uint8_t const cbBuf = 8*2; /* test buffer area */
1892 uint8_t abBuf[8*2 + 8 + 8]; /* test buffer w/ misalignment test space and some extra guard. */
1893 uint8_t BS3_FAR *pbBuf = abBuf;
1894 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
1895 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
1896 uint8_t bFiller;
1897 int off;
1898 int off2;
1899 unsigned cb;
1900 uint8_t BS3_FAR *pbTest;
1901
1902 /* make sure they're allocated */
1903 Bs3MemZero(&Ctx, sizeof(Ctx));
1904 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
1905 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
1906 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
1907 Bs3MemZero(&abBuf, sizeof(abBuf));
1908
1909 /* Create a context, give this routine some more stack space, point the context
1910 at our SIDT [xBX] + UD2 combo, and point DS:xBX at abBuf. */
1911 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
1912 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
1913 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
1914 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
1915 g_uBs3TrapEipHint = Ctx.rip.u32;
1916 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
1917 Bs3RegCtxConvertToRingX(&Ctx, bRing);
1918
1919 /* For successful SIDT attempts, we'll stop at the UD2. */
1920 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
1921 CtxUdExpected.rip.u += pWorker->cbInstr;
1922
1923 /*
1924      * Check that it works at all and that only the bytes we expect get written to.
1925 */
1926 /* First with zero buffer. */
1927 Bs3MemZero(abBuf, sizeof(abBuf));
1928 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), 0))
1929 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1930 if (!ASMMemIsZero(abBuf, sizeof(abBuf)))
1931 Bs3TestFailedF("ASMMemIsZero or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1932 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1933 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1934 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1935 Bs3TestFailedF("286: Top base byte isn't 0xff (#1): %#x\n", abBuf[cbIdtr - 1]);
1936 if (!ASMMemIsZero(&abBuf[cbIdtr], cbBuf - cbIdtr))
1937 Bs3TestFailedF("Unexpected buffer bytes set (#1): cbIdtr=%u abBuf=%.*Rhxs\n", cbIdtr, cbBuf, pbBuf);
1938 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1939 Bs3TestFailedF("Mismatch (%s,#1): expected %.*Rhxs, got %.*Rhxs\n", pWorker->pszDesc, cbIdtr, pbExpected, cbIdtr, abBuf);
1940 g_usBs3TestStep++;
1941
1942     /* Again with a buffer filled with a byte not occurring in the previous result. */
1943 bFiller = 0x55;
1944 while (Bs3MemChr(abBuf, bFiller, cbBuf) != NULL)
1945 bFiller++;
1946 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1947 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
1948 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemSet is busted: bFiller=%#x abBuf=%.*Rhxs\n", bFiller, sizeof(abBuf), pbBuf);
1949
1950 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1951 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1952 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1953 Bs3TestFailedF("286: Top base byte isn't 0xff (#2): %#x\n", abBuf[cbIdtr - 1]);
1954 if (!ASMMemIsAllU8(&abBuf[cbIdtr], cbBuf - cbIdtr, bFiller))
1955 Bs3TestFailedF("Unexpected buffer bytes set (#2): cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1956 if (Bs3MemChr(abBuf, bFiller, cbIdtr) != NULL)
1957 Bs3TestFailedF("Not all bytes touched: cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1958 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1959 Bs3TestFailedF("Mismatch (%s,#2): expected %.*Rhxs, got %.*Rhxs\n", pWorker->pszDesc, cbIdtr, pbExpected, cbIdtr, abBuf);
1960 g_usBs3TestStep++;
1961
1962 /*
1963 * Slide the buffer along 8 bytes to cover misalignment.
1964 */
1965 for (off = 0; off < 8; off++)
1966 {
1967 pbBuf = &abBuf[off];
1968 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBuf[off]);
1969 CtxUdExpected.rbx.u = Ctx.rbx.u;
1970
1971 /* First with zero buffer. */
1972 Bs3MemZero(abBuf, sizeof(abBuf));
1973 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1974 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1975 if (off > 0 && !ASMMemIsZero(abBuf, off))
1976 Bs3TestFailedF("Unexpected buffer bytes set before (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1977 cbIdtr, off, off + cbBuf, abBuf);
1978 if (!ASMMemIsZero(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off))
1979 Bs3TestFailedF("Unexpected buffer bytes set after (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1980 cbIdtr, off, off + cbBuf, abBuf);
1981 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1982 Bs3TestFailedF("286: Top base byte isn't 0xff (#3): %#x\n", abBuf[off + cbIdtr - 1]);
1983 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1984 Bs3TestFailedF("Mismatch (#3): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1985 g_usBs3TestStep++;
1986
1987         /* Again with a buffer filled with a byte not occurring in the previous result. */
1988 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1989 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1990 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1991 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
1992 Bs3TestFailedF("Unexpected buffer bytes set before (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1993 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1994 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off, bFiller))
1995 Bs3TestFailedF("Unexpected buffer bytes set after (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1996 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1997 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
1998 Bs3TestFailedF("Not all bytes touched (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1999 cbIdtr, off, bFiller, off + cbBuf, abBuf);
2000 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2001 Bs3TestFailedF("286: Top base byte isn't 0xff (#4): %#x\n", abBuf[off + cbIdtr - 1]);
2002 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2003 Bs3TestFailedF("Mismatch (#4): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2004 g_usBs3TestStep++;
2005 }
2006 pbBuf = abBuf;
2007 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
2008 CtxUdExpected.rbx.u = Ctx.rbx.u;
2009
2010 /*
2011 * Play with the selector limit if the target mode supports limit checking
2012 * We use BS3_SEL_TEST_PAGE_00 for this
2013 */
2014 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2015 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2016 {
2017 uint16_t cbLimit;
2018 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBuf);
2019 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2020 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2021 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
2022 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
2023 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
2024
2025 if (pWorker->fSs)
2026 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2027 else
2028 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2029
2030 /* Expand up (normal). */
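        /* Walk the operand offset and the segment limit around the boundary: the
           store must succeed when all cbIdtr bytes fit below the limit; when only
           the 2-byte limit part fits, the test expects it to be written before
           the #GP/#SS is raised for the base part. */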
2031 for (off = 0; off < 8; off++)
2032 {
2033 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2034 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2035 {
2036 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2037 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
2038 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2039 if (off + cbIdtr <= cbLimit + 1)
2040 {
2041 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2042 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
2043 Bs3TestFailedF("Not all bytes touched (#5): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2044 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2045 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2046 Bs3TestFailedF("Mismatch (#5): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2047 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2048 Bs3TestFailedF("286: Top base byte isn't 0xff (#5): %#x\n", abBuf[off + cbIdtr - 1]);
2049 }
2050 else
2051 {
2052 if (pWorker->fSs)
2053 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2054 else
2055 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2056 if (off + 2 <= cbLimit + 1)
2057 {
2058 if (Bs3MemChr(&abBuf[off], bFiller, 2) != NULL)
2059 Bs3TestFailedF("Limit bytes not touched (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2060 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2061 if (Bs3MemCmp(&abBuf[off], pbExpected, 2) != 0)
2062 Bs3TestFailedF("Mismatch (#6): expected %.2Rhxs, got %.2Rhxs\n", pbExpected, &abBuf[off]);
2063 if (!ASMMemIsAllU8(&abBuf[off + 2], cbIdtr - 2, bFiller))
2064 Bs3TestFailedF("Base bytes touched on #GP (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2065 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2066 }
2067 else if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
2068 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2069 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2070 }
2071
2072 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2073 Bs3TestFailedF("Leading bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2074 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2075 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
2076 Bs3TestFailedF("Trailing bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2077 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2078
2079 g_usBs3TestStep++;
2080 }
2081 }
2082
2083 /* Expand down (weird). Inverted valid area compared to expand up,
2084            so a limit of zero gives us a valid range for 0001..0ffffh (instead of
2085 a segment with one valid byte at 0000h). Whereas a limit of 0fffeh
2086 means one valid byte at 0ffffh, and a limit of 0ffffh means none
2087 (because in a normal expand up the 0ffffh means all 64KB are
2088 accessible). */
2089 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2090 for (off = 0; off < 8; off++)
2091 {
2092 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2093 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2094 {
2095 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2096 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
2097 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2098
2099 if (off > cbLimit)
2100 {
2101 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2102 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
2103 Bs3TestFailedF("Not all bytes touched (#8): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2104 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2105 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2106 Bs3TestFailedF("Mismatch (#8): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2107 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2108 Bs3TestFailedF("286: Top base byte isn't 0xff (#8): %#x\n", abBuf[off + cbIdtr - 1]);
2109 }
2110 else
2111 {
2112 if (pWorker->fSs)
2113 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2114 else
2115 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2116 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
2117 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2118 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2119 }
2120
2121 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2122 Bs3TestFailedF("Leading bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2123 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2124 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
2125 Bs3TestFailedF("Trailing bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2126 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2127
2128 g_usBs3TestStep++;
2129 }
2130 }
2131
2132 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
2133 CtxUdExpected.rbx.u = Ctx.rbx.u;
2134 CtxUdExpected.ss = Ctx.ss;
2135 CtxUdExpected.ds = Ctx.ds;
2136 }
2137
2138 /*
2139 * Play with the paging.
2140 */
2141 if ( BS3_MODE_IS_PAGED(bTestMode)
2142 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
2143 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
2144 {
2145 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
2146
2147 /*
2148 * Slide the buffer towards the trailing guard page. We'll observe the
2149 * first word being written entirely separately from the 2nd dword/qword.
2150 */
2151 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2152 {
2153 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2154 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2155 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2156 if (off + cbIdtr <= X86_PAGE_SIZE)
2157 {
2158 CtxUdExpected.rbx = Ctx.rbx;
2159 CtxUdExpected.ss = Ctx.ss;
2160 CtxUdExpected.ds = Ctx.ds;
2161 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2162 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2163 Bs3TestFailedF("Mismatch (#9): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2164 }
2165 else
2166 {
2167 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2168 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2169 if ( off <= X86_PAGE_SIZE - 2
2170 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2171 Bs3TestFailedF("Mismatch (#10): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2172 pbExpected, &pbTest[off], off);
2173 if ( off < X86_PAGE_SIZE - 2
2174 && !ASMMemIsAllU8(&pbTest[off + 2], X86_PAGE_SIZE - off - 2, bFiller))
2175 Bs3TestFailedF("Wrote partial base on #PF (#10): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2176 bFiller, X86_PAGE_SIZE - off - 2, &pbTest[off + 2], off);
2177 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2178 Bs3TestFailedF("Wrote partial limit on #PF (#10): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2179 }
2180 g_usBs3TestStep++;
2181 }
2182
2183 /*
2184 * Now, do it the other way around. It should look normal now since writing
2185 * the limit will #PF first and nothing should be written.
2186 */
2187 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
2188 {
2189 Bs3MemSet(pbTest, bFiller, 48);
2190 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2191 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2192 if (off >= 0)
2193 {
2194 CtxUdExpected.rbx = Ctx.rbx;
2195 CtxUdExpected.ss = Ctx.ss;
2196 CtxUdExpected.ds = Ctx.ds;
2197 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2198 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2199 Bs3TestFailedF("Mismatch (#11): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2200 }
2201 else
2202 {
2203 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2204 uFlatTest + off, 0 /*cbIpAdjust*/);
2205 if ( -off < cbIdtr
2206 && !ASMMemIsAllU8(pbTest, cbIdtr + off, bFiller))
2207 Bs3TestFailedF("Wrote partial content on #PF (#12): bFiller=%#x, found %.*Rhxs; off=%d\n",
2208 bFiller, cbIdtr + off, pbTest, off);
2209 }
2210 if (!ASMMemIsAllU8(&pbTest[RT_MAX(cbIdtr + off, 0)], 16, bFiller))
2211 Bs3TestFailedF("Wrote beyond expected area (#13): bFiller=%#x, found %.16Rhxs; off=%d\n",
2212 bFiller, &pbTest[RT_MAX(cbIdtr + off, 0)], off);
2213 g_usBs3TestStep++;
2214 }
2215
2216 /*
2217 * Combine paging and segment limit and check ordering.
2218          * This is kind of interesting here since the instruction seems to
2219 * be doing two separate writes.
2220 */
2221 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2222 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2223 {
2224 uint16_t cbLimit;
2225
2226 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2227 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2228 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2229 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2230 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2231
2232 if (pWorker->fSs)
2233 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2234 else
2235 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2236
2237 /* Expand up (normal), approaching tail guard page. */
2238 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2239 {
2240 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2241 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2242 {
2243 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2244 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2245 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2246 if (off + cbIdtr <= cbLimit + 1)
2247 {
2248 /* No #GP, but maybe #PF. */
2249 if (off + cbIdtr <= X86_PAGE_SIZE)
2250 {
2251 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2252 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2253 Bs3TestFailedF("Mismatch (#14): expected %.*Rhxs, got %.*Rhxs\n",
2254 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2255 }
2256 else
2257 {
2258 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2259 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2260 if ( off <= X86_PAGE_SIZE - 2
2261 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2262 Bs3TestFailedF("Mismatch (#15): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2263 pbExpected, &pbTest[off], off);
2264 cb = X86_PAGE_SIZE - off - 2;
2265 if ( off < X86_PAGE_SIZE - 2
2266 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2267 Bs3TestFailedF("Wrote partial base on #PF (#15): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2268 bFiller, cb, &pbTest[off + 2], off);
2269 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2270 Bs3TestFailedF("Wrote partial limit on #PF (#15): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2271 }
2272 }
2273 else if (off + 2 <= cbLimit + 1)
2274 {
2275                         /* The 2-byte limit part of the [s][ig]dt image fits within the segment limit, so storing it doesn't #GP; it may #PF on the guard page though, and it's the base part that triggers the #GP/#SS. */
2276 if (off <= X86_PAGE_SIZE - 2)
2277 {
2278 if (pWorker->fSs)
2279 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2280 else
2281 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2282 if (Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2283 Bs3TestFailedF("Mismatch (#16): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2284 pbExpected, &pbTest[off], off);
2285 cb = X86_PAGE_SIZE - off - 2;
2286 if ( off < X86_PAGE_SIZE - 2
2287 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2288 Bs3TestFailedF("Wrote partial base with limit (#16): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2289 bFiller, cb, &pbTest[off + 2], off);
2290 }
2291 else
2292 {
2293 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2294 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2295 if ( off < X86_PAGE_SIZE
2296 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2297 Bs3TestFailedF("Mismatch (#16): Partial limit write on #PF: bFiller=%#x, got %.*Rhxs\n",
2298 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2299 }
2300 }
2301 else
2302 {
2303 /* #GP/#SS on limit. */
2304 if (pWorker->fSs)
2305 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2306 else
2307 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2308 if ( off < X86_PAGE_SIZE
2309 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2310 Bs3TestFailedF("Mismatch (#17): Partial write on #GP: bFiller=%#x, got %.*Rhxs\n",
2311 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2312 }
2313
2314 cb = RT_MIN(cbIdtr * 2, off - (X86_PAGE_SIZE - cbIdtr*2));
2315 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], cb, bFiller))
2316 Bs3TestFailedF("Leading bytes touched (#18): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2317                                            cbIdtr, off, cbLimit, bFiller, cb, &pbTest[X86_PAGE_SIZE - cbIdtr * 2]);
2318
2319 g_usBs3TestStep++;
2320
2321 /* Set DS to 0 and check that we get #GP(0). */
2322 if (!pWorker->fSs)
2323 {
2324 Ctx.ds = 0;
2325 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2326 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2327 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2328 g_usBs3TestStep++;
2329 }
2330 }
2331 }
2332
2333 /* Expand down. */
2334 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
2335 uFlatTest -= X86_PAGE_SIZE;
2336
2337 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2338 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2339 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2340 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2341
2342 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2343 {
2344 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2345 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2346 {
2347 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2348 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller, cbIdtr * 2);
2349 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2350 if (cbLimit < off && off >= X86_PAGE_SIZE)
2351 {
2352 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2353 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2354 Bs3TestFailedF("Mismatch (#19): expected %.*Rhxs, got %.*Rhxs\n",
2355 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2356 cb = X86_PAGE_SIZE + cbIdtr*2 - off;
2357 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], cb, bFiller))
2358 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2359                                            cbIdtr, off, cbLimit, bFiller, cb, &pbTest[off + cbIdtr]);
2360 }
2361 else
2362 {
2363 if (cbLimit < off && off < X86_PAGE_SIZE)
2364 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2365 uFlatTest + off, 0 /*cbIpAdjust*/);
2366 else if (pWorker->fSs)
2367 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2368 else
2369 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2370 cb = cbIdtr*2;
2371 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE], cb, bFiller))
2372 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2373                                            cbIdtr, off, cbLimit, bFiller, cb, &pbTest[X86_PAGE_SIZE]);
2374 }
2375 g_usBs3TestStep++;
2376 }
2377 }
2378
2379 pbTest += X86_PAGE_SIZE;
2380 uFlatTest += X86_PAGE_SIZE;
2381 }
2382
2383 Bs3MemGuardedTestPageFree(pbTest);
2384 }
2385
2386 /*
2387 * Check non-canonical 64-bit space.
2388 */
2389 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
2390 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
2391 {
2392 /* Make our references relative to the gap. */
2393 pbTest += g_cbBs3PagingOneCanonicalTrap;
2394
2395 /* Hit it from below. */
2396 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2397 {
2398 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
2399 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2400 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2401 if (off + cbIdtr <= 0)
2402 {
2403 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2404 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2405 Bs3TestFailedF("Mismatch (#21): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2406 }
2407 else
2408 {
2409 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2410 if (off <= -2 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2411 Bs3TestFailedF("Mismatch (#21): expected limit %.2Rhxs, got %.2Rhxs\n", pbExpected, &pbTest[off]);
2412 off2 = off <= -2 ? 2 : 0;
2413 cb = cbIdtr - off2;
2414 if (!ASMMemIsAllU8(&pbTest[off + off2], cb, bFiller))
2415 Bs3TestFailedF("Mismatch (#21): touched base %.*Rhxs, got %.*Rhxs\n",
2416 cb, &pbExpected[off], cb, &pbTest[off + off2]);
2417 }
2418 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2419 Bs3TestFailedF("Leading bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
2420 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2421 Bs3TestFailedF("Trailing bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2422 }
2423
2424 /* Hit it from above. */
2425 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2426 {
2427 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
2428 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2429 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2430 if (off >= 0)
2431 {
2432 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2433 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2434 Bs3TestFailedF("Mismatch (#22): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2435 }
2436 else
2437 {
2438 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2439 if (!ASMMemIsAllU8(&pbTest[off], cbIdtr, bFiller))
2440 Bs3TestFailedF("Mismatch (#22): touched base %.*Rhxs, got %.*Rhxs\n",
2441 cbIdtr, &pbExpected[off], cbIdtr, &pbTest[off]);
2442 }
2443 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2444 Bs3TestFailedF("Leading bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
2445 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2446 Bs3TestFailedF("Trailing bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2447 }
2448
2449 }
2450}
2451
2452
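/**
 * Common driver for the SIDT and SGDT tests: runs each applicable assembly
 * worker in each ring (ring-0 only for real and v8086 modes), skipping the
 * SS-based workers in ring-0 as noted below.
 */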
2453static void bs3CpuBasic2_sidt_sgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
2454 uint8_t const *pbExpected)
2455{
2456 unsigned idx;
2457 unsigned bRing;
2458 unsigned iStep = 0;
2459
2460 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
2461 test and don't want to bother with double faults. */
2462 for (bRing = 0; bRing <= 3; bRing++)
2463 {
2464 for (idx = 0; idx < cWorkers; idx++)
2465 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
2466 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ ))
2467 {
2468 g_usBs3TestStep = iStep;
2469 bs3CpuBasic2_sidt_sgdt_One(&paWorkers[idx], bTestMode, bRing, pbExpected);
2470 iStep += 1000;
2471 }
2472 if (BS3_MODE_IS_RM_OR_V86(bTestMode))
2473 break;
2474 }
2475}
2476
2477
2478BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_sidt)(uint8_t bMode)
2479{
2480 union
2481 {
2482 RTIDTR Idtr;
2483 uint8_t ab[16];
2484 } Expected;
2485
2486 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
2487 bs3CpuBasic2_SetGlobals(bMode);
2488
2489 /*
2490 * Pass to common worker which is only compiled once per mode.
2491 */
2492 Bs3MemZero(&Expected, sizeof(Expected));
2493 ASMGetIDTR(&Expected.Idtr);
2494 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSidtWorkers, RT_ELEMENTS(g_aSidtWorkers), Expected.ab);
2495
2496 /*
2497 * Re-initialize the IDT.
2498 */
2499 Bs3TrapReInit();
2500 return 0;
2501}
2502
2503
2504BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_sgdt)(uint8_t bMode)
2505{
2506 uint64_t const uOrgAddr = Bs3Lgdt_Gdt.uAddr;
2507 uint64_t uNew = 0;
2508 union
2509 {
2510 RTGDTR Gdtr;
2511 uint8_t ab[16];
2512 } Expected;
2513
2514 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
2515 bs3CpuBasic2_SetGlobals(bMode);
2516
2517 /*
2518      * If in paged mode, try to push the GDT way up.
2519 */
2520 Bs3MemZero(&Expected, sizeof(Expected));
2521 ASMGetGDTR(&Expected.Gdtr);
2522 if (BS3_MODE_IS_PAGED(bMode))
2523 {
2524/** @todo loading non-canonical base addresses. */
2525 int rc;
2526 uNew = BS3_MODE_IS_64BIT_SYS(bMode) ? UINT64_C(0xffff80fedcb70000) : UINT64_C(0xc2d28000);
2527 uNew |= uOrgAddr & X86_PAGE_OFFSET_MASK;
2528 rc = Bs3PagingAlias(uNew, uOrgAddr, Bs3Lgdt_Gdt.cb, X86_PTE_P | X86_PTE_RW | X86_PTE_US | X86_PTE_D | X86_PTE_A);
2529 if (RT_SUCCESS(rc))
2530 {
2531 Bs3Lgdt_Gdt.uAddr = uNew;
2532 Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uNew);
2533 ASMGetGDTR(&Expected.Gdtr);
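            /* ASMGetGDTR executed as non-64-bit code only stores the low 32 bits
               of the base, so on a 64-bit system we patch the high dword of the
               expected value in ourselves. */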
2534 if (BS3_MODE_IS_64BIT_SYS(bMode) && ARCH_BITS != 64)
2535 *(uint32_t *)&Expected.ab[6] = (uint32_t)(uNew >> 32);
2536 }
2537 }
2538
2539 /*
2540 * Pass to common worker which is only compiled once per mode.
2541 */
2542 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSgdtWorkers, RT_ELEMENTS(g_aSgdtWorkers), Expected.ab);
2543
2544 /*
2545 * Unalias the GDT.
2546 */
2547 if (uNew != 0)
2548 {
2549 Bs3Lgdt_Gdt.uAddr = uOrgAddr;
2550 Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uOrgAddr);
2551 Bs3PagingUnalias(uNew, Bs3Lgdt_Gdt.cb);
2552 }
2553
2554 /*
2555 * Re-initialize the IDT.
2556 */
2557 Bs3TrapReInit();
2558 return 0;
2559}
2560
2561
2562
2563/*
2564 * LIDT & LGDT
2565 */
2566
2567/**
2568 * Executes one round of LIDT and LGDT tests using one assembly worker.
2569 *
2570 * This is written with driving everything from the 16-bit or 32-bit worker in
2571 * mind, i.e. not assuming the test bitcount is the same as the current.
2572 */
2573static void bs3CpuBasic2_lidt_lgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
2574 uint8_t const *pbRestore, size_t cbRestore, uint8_t const *pbExpected)
2575{
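    /* Limit/base pairs to feed LIDT/LGDT in 64-bit mode; the entries with a
       non-canonical base address are expected to #GP (fGP set). */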
2576 static const struct
2577 {
2578 bool fGP;
2579 uint16_t cbLimit;
2580 uint64_t u64Base;
2581 } s_aValues64[] =
2582 {
2583 { false, 0x0000, UINT64_C(0x0000000000000000) },
2584 { false, 0x0001, UINT64_C(0x0000000000000001) },
2585 { false, 0x0002, UINT64_C(0x0000000000000010) },
2586 { false, 0x0003, UINT64_C(0x0000000000000123) },
2587 { false, 0x0004, UINT64_C(0x0000000000001234) },
2588 { false, 0x0005, UINT64_C(0x0000000000012345) },
2589 { false, 0x0006, UINT64_C(0x0000000000123456) },
2590 { false, 0x0007, UINT64_C(0x0000000001234567) },
2591 { false, 0x0008, UINT64_C(0x0000000012345678) },
2592 { false, 0x0009, UINT64_C(0x0000000123456789) },
2593 { false, 0x000a, UINT64_C(0x000000123456789a) },
2594 { false, 0x000b, UINT64_C(0x00000123456789ab) },
2595 { false, 0x000c, UINT64_C(0x0000123456789abc) },
2596 { false, 0x001c, UINT64_C(0x00007ffffeefefef) },
2597 { false, 0xffff, UINT64_C(0x00007fffffffffff) },
2598 { true, 0xf3f1, UINT64_C(0x0000800000000000) },
2599 { true, 0x0000, UINT64_C(0x0000800000000000) },
2600 { true, 0x0000, UINT64_C(0x0000800000000333) },
2601 { true, 0x00f0, UINT64_C(0x0001000000000000) },
2602 { true, 0x0ff0, UINT64_C(0x0012000000000000) },
2603 { true, 0x0eff, UINT64_C(0x0123000000000000) },
2604 { true, 0xe0fe, UINT64_C(0x1234000000000000) },
2605 { true, 0x00ad, UINT64_C(0xffff300000000000) },
2606 { true, 0x0000, UINT64_C(0xffff7fffffffffff) },
2607 { true, 0x00f0, UINT64_C(0xffff7fffffffffff) },
2608 { false, 0x5678, UINT64_C(0xffff800000000000) },
2609 { false, 0x2969, UINT64_C(0xffffffffffeefefe) },
2610 { false, 0x1221, UINT64_C(0xffffffffffffffff) },
2611 { false, 0x1221, UINT64_C(0xffffffffffffffff) },
2612 };
2613 static const struct
2614 {
2615 uint16_t cbLimit;
2616 uint32_t u32Base;
2617 } s_aValues32[] =
2618 {
2619 { 0xdfdf, UINT32_C(0xefefefef) },
2620 { 0x0000, UINT32_C(0x00000000) },
2621 { 0x0001, UINT32_C(0x00000001) },
2622 { 0x0002, UINT32_C(0x00000012) },
2623 { 0x0003, UINT32_C(0x00000123) },
2624 { 0x0004, UINT32_C(0x00001234) },
2625 { 0x0005, UINT32_C(0x00012345) },
2626 { 0x0006, UINT32_C(0x00123456) },
2627 { 0x0007, UINT32_C(0x01234567) },
2628 { 0x0008, UINT32_C(0x12345678) },
2629 { 0x0009, UINT32_C(0x80204060) },
2630 { 0x000a, UINT32_C(0xddeeffaa) },
2631 { 0x000b, UINT32_C(0xfdecdbca) },
2632 { 0x000c, UINT32_C(0x6098456b) },
2633 { 0x000d, UINT32_C(0x98506099) },
2634 { 0x000e, UINT32_C(0x206950bc) },
2635 { 0x000f, UINT32_C(0x9740395d) },
2636 { 0x0334, UINT32_C(0x64a9455e) },
2637 { 0xb423, UINT32_C(0xd20b6eff) },
2638 { 0x4955, UINT32_C(0x85296d46) },
2639 { 0xffff, UINT32_C(0x07000039) },
2640 { 0xefe1, UINT32_C(0x0007fe00) },
2641 };
2642
2643 BS3TRAPFRAME TrapCtx;
2644 BS3REGCTX Ctx;
2645 BS3REGCTX CtxUdExpected;
2646 BS3REGCTX TmpCtx;
2647 uint8_t abBufLoad[40]; /* Test buffer w/ misalignment test space and some (cbIdtr) extra guard. */
2648 uint8_t abBufSave[32]; /* For saving the result after loading. */
2649 uint8_t abBufRestore[24]; /* For restoring sane value (same seg as abBufSave!). */
2650 uint8_t abExpectedFilled[32]; /* Same as pbExpected, except it's filled with bFiller2 instead of zeros. */
2651 uint8_t BS3_FAR *pbBufSave; /* Correctly aligned pointer into abBufSave. */
2652 uint8_t BS3_FAR *pbBufRestore; /* Correctly aligned pointer into abBufRestore. */
2653 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
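    /* Number of base address bytes the load actually uses: 8 in 64-bit code,
       3 (24-bit base) when the effective operand size is 16-bit, otherwise 4. */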
2654 uint8_t const cbBaseLoaded = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 8
2655 : BS3_MODE_IS_16BIT_CODE(bTestMode) == !(pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE)
2656 ? 3 : 4;
2657 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
2658 uint8_t const bTop16BitBase = f286 ? 0xff : 0x00;
2659 uint8_t bFiller1; /* For filling abBufLoad. */
2660 uint8_t bFiller2; /* For filling abBufSave and expectations. */
2661 int off;
2662 uint8_t BS3_FAR *pbTest;
2663 unsigned i;
2664
2665 /* make sure they're allocated */
2666 Bs3MemZero(&Ctx, sizeof(Ctx));
2667 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
2668 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
2669 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
2670 Bs3MemZero(abBufSave, sizeof(abBufSave));
2671 Bs3MemZero(abBufLoad, sizeof(abBufLoad));
2672 Bs3MemZero(abBufRestore, sizeof(abBufRestore));
2673
2674 /*
2675 * Create a context, giving this routine some more stack space.
2676 * - Point the context at our LIDT [xBX] + SIDT [xDI] + LIDT [xSI] + UD2 combo.
2677 * - Point DS/SS:xBX at abBufLoad.
2678 * - Point ES:xDI at abBufSave.
2679 * - Point ES:xSI at abBufRestore.
2680 */
2681 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
2682 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
2683 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
2684 g_uBs3TrapEipHint = Ctx.rip.u32;
2685 Ctx.rflags.u16 &= ~X86_EFL_IF;
2686 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2687
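    /* Align the save/restore buffers such that their base fields (following the
       2-byte limit) end up 8-byte aligned. */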
2688 pbBufSave = abBufSave;
2689 if ((BS3_FP_OFF(pbBufSave) + 2) & 7)
2690 pbBufSave += 8 - ((BS3_FP_OFF(pbBufSave) + 2) & 7);
2691 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rdi, &Ctx.es, pbBufSave);
2692
2693 pbBufRestore = abBufRestore;
2694 if ((BS3_FP_OFF(pbBufRestore) + 2) & 7)
2695 pbBufRestore += 8 - ((BS3_FP_OFF(pbBufRestore) + 2) & 7);
2696 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsi, &Ctx.es, pbBufRestore);
2697 Bs3MemCpy(pbBufRestore, pbRestore, cbRestore);
2698
2699 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
2700 Bs3RegCtxConvertToRingX(&Ctx, bRing);
2701
2702     /* For successful loads, we'll stop at the UD2. */
2703 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
2704 CtxUdExpected.rip.u += pWorker->cbInstr;
2705
2706 /*
2707 * Check that it works at all.
2708 */
2709 Bs3MemZero(abBufLoad, sizeof(abBufLoad));
2710 Bs3MemCpy(abBufLoad, pbBufRestore, cbIdtr);
2711 Bs3MemZero(abBufSave, sizeof(abBufSave));
2712 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2713 if (bRing != 0)
2714 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2715 else
2716 {
2717 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2718 if (Bs3MemCmp(pbBufSave, pbExpected, cbIdtr * 2) != 0)
2719 Bs3TestFailedF("Mismatch (%s, #1): expected %.*Rhxs, got %.*Rhxs\n",
2720 pWorker->pszDesc, cbIdtr*2, pbExpected, cbIdtr*2, pbBufSave);
2721 }
2722 g_usBs3TestStep++;
2723
2724     /* Determine two filler bytes that don't appear in the previous result or our expectations. */
2725 bFiller1 = ~0x55;
2726 while ( Bs3MemChr(pbBufSave, bFiller1, cbIdtr) != NULL
2727 || Bs3MemChr(pbRestore, bFiller1, cbRestore) != NULL
2728 || bFiller1 == 0xff)
2729 bFiller1++;
2730 bFiller2 = 0x33;
2731 while ( Bs3MemChr(pbBufSave, bFiller2, cbIdtr) != NULL
2732 || Bs3MemChr(pbRestore, bFiller2, cbRestore) != NULL
2733 || bFiller2 == 0xff
2734 || bFiller2 == bFiller1)
2735 bFiller2++;
2736 Bs3MemSet(abExpectedFilled, bFiller2, sizeof(abExpectedFilled));
2737 Bs3MemCpy(abExpectedFilled, pbExpected, cbIdtr);
2738
2739     /* Again with a buffer filled with a byte not occurring in the previous result. */
2740 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2741 Bs3MemCpy(abBufLoad, pbBufRestore, cbIdtr);
2742 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2743 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2744 if (bRing != 0)
2745 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2746 else
2747 {
2748 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2749 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2750 Bs3TestFailedF("Mismatch (%s, #2): expected %.*Rhxs, got %.*Rhxs\n",
2751 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2752 }
2753 g_usBs3TestStep++;
2754
2755 /*
2756 * Try loading a bunch of different limit+base values to check what happens,
2757 * especially wrt the top part of the base in 16-bit mode.
2758 */
2759 if (BS3_MODE_IS_64BIT_CODE(bTestMode))
2760 {
2761 for (i = 0; i < RT_ELEMENTS(s_aValues64); i++)
2762 {
2763 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2764 Bs3MemCpy(&abBufLoad[0], &s_aValues64[i].cbLimit, 2);
2765 Bs3MemCpy(&abBufLoad[2], &s_aValues64[i].u64Base, 8);
2766 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2767 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2768 if (bRing != 0 || s_aValues64[i].fGP)
2769 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2770 else
2771 {
2772 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2773 if ( Bs3MemCmp(&pbBufSave[0], &s_aValues64[i].cbLimit, 2) != 0
2774 || Bs3MemCmp(&pbBufSave[2], &s_aValues64[i].u64Base, 8) != 0
2775 || !ASMMemIsAllU8(&pbBufSave[10], cbIdtr, bFiller2))
2776 Bs3TestFailedF("Mismatch (%s, #2): expected %04RX16:%016RX64, fillers %#x %#x, got %.*Rhxs\n",
2777 pWorker->pszDesc, s_aValues64[i].cbLimit, s_aValues64[i].u64Base,
2778 bFiller1, bFiller2, cbIdtr*2, pbBufSave);
2779 }
2780 g_usBs3TestStep++;
2781 }
2782 }
2783 else
2784 {
2785 for (i = 0; i < RT_ELEMENTS(s_aValues32); i++)
2786 {
2787 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2788 Bs3MemCpy(&abBufLoad[0], &s_aValues32[i].cbLimit, 2);
2789 Bs3MemCpy(&abBufLoad[2], &s_aValues32[i].u32Base, cbBaseLoaded);
2790 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2791 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2792 if (bRing != 0)
2793 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2794 else
2795 {
2796 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2797 if ( Bs3MemCmp(&pbBufSave[0], &s_aValues32[i].cbLimit, 2) != 0
2798 || Bs3MemCmp(&pbBufSave[2], &s_aValues32[i].u32Base, cbBaseLoaded) != 0
2799 || ( cbBaseLoaded != 4
2800 && pbBufSave[2+3] != bTop16BitBase)
2801 || !ASMMemIsAllU8(&pbBufSave[8], cbIdtr, bFiller2))
2802 Bs3TestFailedF("Mismatch (%s,#3): loaded %04RX16:%08RX32, fillers %#x %#x%s, got %.*Rhxs\n",
2803 pWorker->pszDesc, s_aValues32[i].cbLimit, s_aValues32[i].u32Base, bFiller1, bFiller2,
2804 f286 ? ", 286" : "", cbIdtr*2, pbBufSave);
2805 }
2806 g_usBs3TestStep++;
2807 }
2808 }
2809
2810 /*
2811 * Slide the buffer along 8 bytes to cover misalignment.
2812 */
2813 for (off = 0; off < 8; off++)
2814 {
2815 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBufLoad[off]);
2816 CtxUdExpected.rbx.u = Ctx.rbx.u;
2817
2818 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2819 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2820 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2821 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2822 if (bRing != 0)
2823 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2824 else
2825 {
2826 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2827 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2828 Bs3TestFailedF("Mismatch (%s, #4): expected %.*Rhxs, got %.*Rhxs\n",
2829 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2830 }
2831 g_usBs3TestStep++;
2832 }
2833 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2834 CtxUdExpected.rbx.u = Ctx.rbx.u;
2835
2836 /*
2837 * Play with the selector limit if the target mode supports limit checking.
2838 * We use BS3_SEL_TEST_PAGE_00 for this.
2839 */
2840 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2841 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2842 {
2843 uint16_t cbLimit;
2844 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBufLoad);
2845 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2846 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2847 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
2848 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
2849 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
2850
2851 if (pWorker->fSs)
2852 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2853 else
2854 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2855
2856 /* Expand up (normal). */
2857 for (off = 0; off < 8; off++)
2858 {
2859 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2860 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2861 {
2862 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2863
2864 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2865 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2866 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2867 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2868 if (bRing != 0)
2869 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2870 else if (off + cbIdtr <= cbLimit + 1)
2871 {
2872 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2873 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2874 Bs3TestFailedF("Mismatch (%s, #5): expected %.*Rhxs, got %.*Rhxs\n",
2875 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2876 }
2877 else if (pWorker->fSs)
2878 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2879 else
2880 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2881 g_usBs3TestStep++;
2882
2883 /* Again with zero limit and messed up base (should trigger triple fault if partially loaded). */
2884 abBufLoad[off] = abBufLoad[off + 1] = 0;
2885 abBufLoad[off + 2] |= 1;
2886 abBufLoad[off + cbIdtr - 2] ^= 0x5a;
2887 abBufLoad[off + cbIdtr - 1] ^= 0xa5;
2888 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2889 if (bRing != 0)
2890 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2891 else if (off + cbIdtr <= cbLimit + 1)
2892 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2893 else if (pWorker->fSs)
2894 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2895 else
2896 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2897 }
2898 }
2899
2900 /* Expand down (weird). The valid area is inverted compared to expand up,
2901 so a limit of zero gives us a valid range of 0001h..0ffffh (instead of
2902 a segment with one valid byte at 0000h). Whereas a limit of 0fffeh
2903 means one valid byte at 0ffffh, and a limit of 0ffffh means none
2904 (because for a normal expand up segment a limit of 0ffffh means all
2905 64KB are accessible). */
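                /* Worked example for a byte-granular 16-bit expand-down data segment:
                     limit=0x0000 -> offsets 0x0001..0xffff are valid;
                     limit=0x0007 -> offsets 0x0008..0xffff are valid;
                     limit=0xfffe -> only offset 0xffff is valid;
                     limit=0xffff -> no offset is valid at all. */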
2906 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2907 for (off = 0; off < 8; off++)
2908 {
2909 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2910 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2911 {
2912 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2913
2914 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2915 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2916 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2917 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2918 if (bRing != 0)
2919 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2920 else if (off > cbLimit)
2921 {
2922 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2923 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2924 Bs3TestFailedF("Mismatch (%s, #6): expected %.*Rhxs, got %.*Rhxs\n",
2925 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2926 }
2927 else if (pWorker->fSs)
2928 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2929 else
2930 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2931 g_usBs3TestStep++;
2932
2933 /* Again with zero limit and messed up base (should trigger triple fault if partially loaded). */
2934 abBufLoad[off] = abBufLoad[off + 1] = 0;
2935 abBufLoad[off + 2] |= 3;
2936 abBufLoad[off + cbIdtr - 2] ^= 0x55;
2937 abBufLoad[off + cbIdtr - 1] ^= 0xaa;
2938 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2939 if (bRing != 0)
2940 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2941 else if (off > cbLimit)
2942 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2943 else if (pWorker->fSs)
2944 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2945 else
2946 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2947 }
2948 }
2949
2950 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2951 CtxUdExpected.rbx.u = Ctx.rbx.u;
2952 CtxUdExpected.ss = Ctx.ss;
2953 CtxUdExpected.ds = Ctx.ds;
2954 }
2955
2956 /*
2957 * Play with the paging.
2958 */
2959 if ( BS3_MODE_IS_PAGED(bTestMode)
2960 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
2961 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
2962 {
2963 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
2964
2965 /*
2966 * Slide the load buffer towards the trailing guard page.
2967 */
2968 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[X86_PAGE_SIZE]);
2969 CtxUdExpected.ss = Ctx.ss;
2970 CtxUdExpected.ds = Ctx.ds;
2971 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2972 {
2973 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller1, cbIdtr*2);
2974 if (off < X86_PAGE_SIZE)
2975 Bs3MemCpy(&pbTest[off], pbBufRestore, RT_MIN(X86_PAGE_SIZE - off, cbIdtr));
2976 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2977 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2978 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2979 if (bRing != 0)
2980 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2981 else if (off + cbIdtr <= X86_PAGE_SIZE)
2982 {
2983 CtxUdExpected.rbx = Ctx.rbx;
2984 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2985 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr*2) != 0)
2986 Bs3TestFailedF("Mismatch (%s, #7): expected %.*Rhxs, got %.*Rhxs\n",
2987 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2988 }
2989 else
2990 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2991 g_usBs3TestStep++;
2992
2993 /* Again with zero limit and maybe messed up base as well (triple fault if buggy).
2994 The 386DX-40 here triple faults (or something) with off == 0xffe, nothing else. */
2995 if ( off < X86_PAGE_SIZE && off + cbIdtr > X86_PAGE_SIZE
2996 && ( off != X86_PAGE_SIZE - 2
2997 || (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) != BS3CPU_80386)
2998 )
2999 {
3000 pbTest[off] = 0;
3001 if (off + 1 < X86_PAGE_SIZE)
3002 pbTest[off + 1] = 0;
3003 if (off + 2 < X86_PAGE_SIZE)
3004 pbTest[off + 2] |= 7;
3005 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3006 if (bRing != 0)
3007 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3008 else
3009 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3010 g_usBs3TestStep++;
3011 }
3012 }
3013
3014 /*
3015 * Now, do it the other way around. This should be uneventful since reading
3016 * the limit will #PF first and nothing should be loaded.
3017 */
3018 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
3019 {
3020 Bs3MemSet(pbTest, bFiller1, 48);
3021 if (off >= 0)
3022 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3023 else if (off + cbIdtr > 0)
3024 Bs3MemCpy(pbTest, &pbBufRestore[-off], cbIdtr + off);
3025 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
3026 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3027 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3028 if (bRing != 0)
3029 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3030 else if (off >= 0)
3031 {
3032 CtxUdExpected.rbx = Ctx.rbx;
3033 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3034 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr*2) != 0)
3035 Bs3TestFailedF("Mismatch (%s, #8): expected %.*Rhxs, got %.*Rhxs\n",
3036 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3037 }
3038 else
3039 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3040 g_usBs3TestStep++;
3041
3042 /* Again with messed up base as well (triple fault if buggy). */
3043 if (off < 0 && off > -cbIdtr)
3044 {
3045 if (off + 2 >= 0)
3046 pbTest[off + 2] |= 15;
3047 pbTest[off + cbIdtr - 1] ^= 0xaa;
3048 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3049 if (bRing != 0)
3050 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3051 else
3052 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3053 g_usBs3TestStep++;
3054 }
3055 }
3056
3057 /*
3058 * Combine paging and segment limit and check ordering.
3059 * This is kind of interesting since the instruction seems to actually be
3060 * doing two separate reads, just like its S[IG]DT counterpart.
3061 *
3062 * Note! My 486DX4 does a DWORD limit read when the operand size is 32-bit,
3063 * that's what f486Weirdness deals with.
3064 */
3065 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
3066 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
3067 {
3068 bool const f486Weirdness = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80486
3069 && BS3_MODE_IS_32BIT_CODE(bTestMode) == !(pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE);
3070 uint16_t cbLimit;
3071
3072 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
3073 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
3074 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
3075 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
3076 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
3077
3078 if (pWorker->fSs)
3079 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
3080 else
3081 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
3082
3083 /* Expand up (normal), approaching tail guard page. */
3084 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
3085 {
3086 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
3087 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
3088 {
3089 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
3090 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller1, cbIdtr * 2);
3091 if (off < X86_PAGE_SIZE)
3092 Bs3MemCpy(&pbTest[off], pbBufRestore, RT_MIN(cbIdtr, X86_PAGE_SIZE - off));
3093 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3094 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3095 if (bRing != 0)
3096 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3097 else if (off + cbIdtr <= cbLimit + 1)
3098 {
3099 /* No #GP, but maybe #PF. */
3100 if (off + cbIdtr <= X86_PAGE_SIZE)
3101 {
3102 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3103 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3104 Bs3TestFailedF("Mismatch (%s, #9): expected %.*Rhxs, got %.*Rhxs\n",
3105 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3106 }
3107 else
3108 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3109 }
3110 /* No #GP/#SS on limit, but instead #PF? */
3111 else if ( !f486Weirdness
3112 ? off < cbLimit && off >= 0xfff
3113 : off + 2 < cbLimit && off >= 0xffd)
3114 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3115 /* #GP/#SS on limit or base. */
3116 else if (pWorker->fSs)
3117 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
3118 else
3119 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3120
3121 g_usBs3TestStep++;
3122
3123 /* Set DS to 0 and check that we get #GP(0). */
3124 if (!pWorker->fSs)
3125 {
3126 Ctx.ds = 0;
3127 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3128 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3129 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
3130 g_usBs3TestStep++;
3131 }
3132 }
3133 }
3134
3135 /* Expand down. */
3136 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
3137 uFlatTest -= X86_PAGE_SIZE;
3138
3139 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
3140 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
3141 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
3142 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
3143
3144 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
3145 {
3146 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
3147 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
3148 {
3149 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
3150 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller1, cbIdtr * 2);
3151 if (off >= X86_PAGE_SIZE)
3152 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3153 else if (off > X86_PAGE_SIZE - cbIdtr)
3154 Bs3MemCpy(&pbTest[X86_PAGE_SIZE], &pbBufRestore[X86_PAGE_SIZE - off], cbIdtr - (X86_PAGE_SIZE - off));
3155 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3156 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3157 if (bRing != 0)
3158 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3159 else if (cbLimit < off && off >= X86_PAGE_SIZE)
3160 {
3161 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3162 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3163 Bs3TestFailedF("Mismatch (%s, #10): expected %.*Rhxs, got %.*Rhxs\n",
3164 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3165 }
3166 else if (cbLimit < off && off < X86_PAGE_SIZE)
3167 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3168 else if (pWorker->fSs)
3169 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
3170 else
3171 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3172 g_usBs3TestStep++;
3173 }
3174 }
3175
3176 pbTest += X86_PAGE_SIZE;
3177 uFlatTest += X86_PAGE_SIZE;
3178 }
3179
3180 Bs3MemGuardedTestPageFree(pbTest);
3181 }
3182
3183 /*
3184 * Check non-canonical 64-bit space.
3185 */
3186 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
3187 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
3188 {
3189 /* Make our references relative to the gap. */
3190 pbTest += g_cbBs3PagingOneCanonicalTrap;
3191
3192 /* Hit it from below. */
3193 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
3194 {
3195 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
3196 Bs3MemSet(&pbTest[-64], bFiller1, 64*2);
3197 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3198 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3199 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3200 if (off + cbIdtr > 0 || bRing != 0)
3201 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3202 else
3203 {
3204 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3205 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3206 Bs3TestFailedF("Mismatch (%s, #11): expected %.*Rhxs, got %.*Rhxs\n",
3207 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3208 }
3209 }
3210
3211 /* Hit it from above. */
3212 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
3213 {
3214 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
3215 Bs3MemSet(&pbTest[-64], bFiller1, 64*2);
3216 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3217 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3218 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3219 if (off < 0 || bRing != 0)
3220 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3221 else
3222 {
3223 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3224 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3225 Bs3TestFailedF("Mismatch (%s, #19): expected %.*Rhxs, got %.*Rhxs\n",
3226 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3227 }
3228 }
3229
3230 }
3231}
3232
3233
3234static void bs3CpuBasic2_lidt_lgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
3235 void const *pvRestore, size_t cbRestore, uint8_t const *pbExpected)
3236{
3237 unsigned idx;
3238 unsigned bRing;
3239 unsigned iStep = 0;
3240
3241 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
3242 test and don't want to bother with double faults. */
3243 for (bRing = BS3_MODE_IS_V86(bTestMode) ? 3 : 0; bRing <= 3; bRing++)
3244 {
3245 for (idx = 0; idx < cWorkers; idx++)
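            /* Workers flagged 386+ only run in PE16 if the CPU is a 386 or later;
               modes above PE16 imply a 386 anyway. */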
3246 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
3247 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ )
3248 && ( !(paWorkers[idx].fFlags & BS3CB2SIDTSGDT_F_386PLUS)
3249 || ( bTestMode > BS3_MODE_PE16
3250 || ( bTestMode == BS3_MODE_PE16
3251 && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)) ) )
3252 {
3253 //Bs3TestPrintf("idx=%-2d fpfnWorker=%p fSs=%d cbInstr=%d\n",
3254 // idx, paWorkers[idx].fpfnWorker, paWorkers[idx].fSs, paWorkers[idx].cbInstr);
3255 g_usBs3TestStep = iStep;
3256 bs3CpuBasic2_lidt_lgdt_One(&paWorkers[idx], bTestMode, bRing, pvRestore, cbRestore, pbExpected);
3257 iStep += 1000;
3258 }
3259 if (BS3_MODE_IS_RM_SYS(bTestMode))
3260 break;
3261 }
3262}
3263
3264
3265BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_lidt)(uint8_t bMode)
3266{
3267 union
3268 {
3269 RTIDTR Idtr;
3270 uint8_t ab[32]; /* At least cbIdtr*2! */
3271 } Expected;
3272
3273 //if (bMode != BS3_MODE_LM64) return 0;
3274 bs3CpuBasic2_SetGlobals(bMode);
3275
3276 /*
3277 * Pass to common worker which is only compiled once per mode.
3278 */
3279 Bs3MemZero(&Expected, sizeof(Expected));
3280 ASMGetIDTR(&Expected.Idtr);
3281
3282 if (BS3_MODE_IS_RM_SYS(bMode))
3283 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3284 &Bs3Lidt_Ivt, sizeof(Bs3Lidt_Ivt), Expected.ab);
3285 else if (BS3_MODE_IS_16BIT_SYS(bMode))
3286 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3287 &Bs3Lidt_Idt16, sizeof(Bs3Lidt_Idt16), Expected.ab);
3288 else if (BS3_MODE_IS_32BIT_SYS(bMode))
3289 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3290 &Bs3Lidt_Idt32, sizeof(Bs3Lidt_Idt32), Expected.ab);
3291 else
3292 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3293 &Bs3Lidt_Idt64, sizeof(Bs3Lidt_Idt64), Expected.ab);
3294
3295 /*
3296 * Re-initialize the IDT.
3297 */
3298 Bs3TrapReInit();
3299 return 0;
3300}
3301
3302
3303BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_lgdt)(uint8_t bMode)
3304{
3305 union
3306 {
3307 RTGDTR Gdtr;
3308 uint8_t ab[32]; /* At least cbIdtr*2! */
3309 } Expected;
3310
3311 //if (!BS3_MODE_IS_64BIT_SYS(bMode)) return 0;
3312 bs3CpuBasic2_SetGlobals(bMode);
3313
3314 /*
3315 * Pass to common worker which is only compiled once per mode.
3316 */
3317 if (BS3_MODE_IS_RM_SYS(bMode))
3318 ASMSetGDTR((PRTGDTR)&Bs3LgdtDef_Gdt);
3319 Bs3MemZero(&Expected, sizeof(Expected));
3320 ASMGetGDTR(&Expected.Gdtr);
3321
3322 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLgdtWorkers, RT_ELEMENTS(g_aLgdtWorkers),
3323 &Bs3LgdtDef_Gdt, sizeof(Bs3LgdtDef_Gdt), Expected.ab);
3324
3325 /*
3326 * Re-initialize the IDT.
3327 */
3328 Bs3TrapReInit();
3329 return 0;
3330}
3331
3332typedef union IRETBUF
3333{
3334 uint64_t au64[6]; /* max req is 5 */
3335 uint32_t au32[12]; /* max req is 9 */
3336 uint16_t au16[24]; /* max req is 5 */
3337 uint8_t ab[48];
3338} IRETBUF;
3339typedef IRETBUF BS3_FAR *PIRETBUF;
3340
3341
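/* iretbuf_SetupFrame lays out an IRET return frame in pIretBuf using cbPop-wide slots:
     [0]=IP, [1]=CS, [2]=FLAGS, [3]=SP, [4]=SS.
   A same-CPL IRET only consumes the first three slots; a CPL-changing IRET (and IRETQ
   in 64-bit mode, always) also pops SP and SS - cf. cbSameCplFrame in the worker below. */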
3342static void iretbuf_SetupFrame(PIRETBUF pIretBuf, unsigned const cbPop,
3343 uint16_t uCS, uint64_t uPC, uint32_t fEfl, uint16_t uSS, uint64_t uSP)
3344{
3345 if (cbPop == 2)
3346 {
3347 pIretBuf->au16[0] = (uint16_t)uPC;
3348 pIretBuf->au16[1] = uCS;
3349 pIretBuf->au16[2] = (uint16_t)fEfl;
3350 pIretBuf->au16[3] = (uint16_t)uSP;
3351 pIretBuf->au16[4] = uSS;
3352 }
3353 else if (cbPop != 8)
3354 {
3355 pIretBuf->au32[0] = (uint32_t)uPC;
3356 pIretBuf->au16[1*2] = uCS;
3357 pIretBuf->au32[2] = (uint32_t)fEfl;
3358 pIretBuf->au32[3] = (uint32_t)uSP;
3359 pIretBuf->au16[4*2] = uSS;
3360 }
3361 else
3362 {
3363 pIretBuf->au64[0] = uPC;
3364 pIretBuf->au16[1*4] = uCS;
3365 pIretBuf->au64[2] = fEfl;
3366 pIretBuf->au64[3] = uSP;
3367 pIretBuf->au16[4*4] = uSS;
3368 }
3369}
3370
3371
3372static void bs3CpuBasic2_iret_Worker(uint8_t bTestMode, FPFNBS3FAR pfnIret, unsigned const cbPop,
3373 PIRETBUF pIretBuf, const char BS3_FAR *pszDesc)
3374{
3375 BS3TRAPFRAME TrapCtx;
3376 BS3REGCTX Ctx;
3377 BS3REGCTX CtxUdExpected;
3378 BS3REGCTX TmpCtx;
3379 BS3REGCTX TmpCtxExpected;
3380 uint8_t abLowUd[8];
3381 uint8_t abLowIret[8];
3382 FPFNBS3FAR pfnUdLow = (FPFNBS3FAR)abLowUd;
3383 FPFNBS3FAR pfnIretLow = (FPFNBS3FAR)abLowIret;
3384 unsigned const cbSameCplFrame = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 5*cbPop : 3*cbPop;
3385 bool const fUseLowCode = cbPop == 2 && !BS3_MODE_IS_16BIT_CODE(bTestMode);
3386 int iRingDst;
3387 int iRingSrc;
3388 uint16_t uDplSs;
3389 uint16_t uRplCs;
3390 uint16_t uRplSs;
3391// int i;
3392 uint8_t BS3_FAR *pbTest;
3393
3394 NOREF(abLowUd);
3395#define IRETBUF_SET_SEL(a_idx, a_uValue) \
3396 do { *(uint16_t BS3_FAR *)&pIretBuf->ab[(a_idx) * cbPop] = (a_uValue); } while (0)
3397#define IRETBUF_SET_REG(a_idx, a_uValue) \
3398 do { uint8_t BS3_FAR *pbTmp = &pIretBuf->ab[(a_idx) * cbPop]; \
3399 if (cbPop == 2) *(uint16_t BS3_FAR *)pbTmp = (uint16_t)(a_uValue); \
3400 else if (cbPop != 8) *(uint32_t BS3_FAR *)pbTmp = (uint32_t)(a_uValue); \
3401 else *(uint64_t BS3_FAR *)pbTmp = (a_uValue); \
3402 } while (0)
3403
3404 /* make sure they're allocated */
3405 Bs3MemZero(&Ctx, sizeof(Ctx));
3406 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
3407 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
3408 Bs3MemZero(&TmpCtxExpected, sizeof(TmpCtxExpected));
3409 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
3410
3411 /*
3412 * When dealing with 16-bit irets in 32-bit or 64-bit mode, we must have
3413 * copies of both iret and ud in the first 64KB of memory. The stack is
3414 * below 64KB, so we'll just copy the instructions onto the stack.
3415 */
3416 Bs3MemCpy(abLowUd, bs3CpuBasic2_ud2, 4);
3417 Bs3MemCpy(abLowIret, pfnIret, 4);
3418
3419 /*
3420 * Create a context (stack is irrelevant, we'll mainly be using pIretBuf).
3421 * - Point the context at our iret instruction.
3422 * - Point SS:xSP at pIretBuf.
3423 */
3424 Bs3RegCtxSaveEx(&Ctx, bTestMode, 0);
3425 if (!fUseLowCode)
3426 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pfnIret);
3427 else
3428 Bs3RegCtxSetRipCsFromCurPtr(&Ctx, pfnIretLow);
3429 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
3430 g_uBs3TrapEipHint = Ctx.rip.u32;
3431 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, pIretBuf);
3432
3433 /*
3434 * The first success (UD) context keeps the same code bit-count as the iret.
3435 */
3436 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
3437 if (!fUseLowCode)
3438 Bs3RegCtxSetRipCsFromLnkPtr(&CtxUdExpected, bs3CpuBasic2_ud2);
3439 else
3440 Bs3RegCtxSetRipCsFromCurPtr(&CtxUdExpected, pfnUdLow);
3441 CtxUdExpected.rsp.u += cbSameCplFrame;
3442
3443 /*
3444 * Check that it works at all.
3445 */
3446 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3447 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3448
3449 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3450 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3451 g_usBs3TestStep++;
3452
3453 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
3454 {
3455 /* Selectors are modified when switching rings, so we need to know
3456 what we're dealing with there. */
3457 if ( !BS3_SEL_IS_IN_R0_RANGE(Ctx.cs) || !BS3_SEL_IS_IN_R0_RANGE(Ctx.ss)
3458 || !BS3_SEL_IS_IN_R0_RANGE(Ctx.ds) || !BS3_SEL_IS_IN_R0_RANGE(Ctx.es))
3459 Bs3TestFailedF("Expected R0 CS, SS, DS and ES; not %#x, %#x, %#x and %#x\n", Ctx.cs, Ctx.ss, Ctx.ds, Ctx.es);
3460 if (Ctx.fs || Ctx.gs)
3461 Bs3TestFailed("Expected R0 FS and GS to be 0!\n");
3462
3463 /*
3464 * Test returning to outer rings if protected mode.
3465 */
3466 Bs3MemCpy(&TmpCtx, &Ctx, sizeof(TmpCtx));
3467 Bs3MemCpy(&TmpCtxExpected, &CtxUdExpected, sizeof(TmpCtxExpected));
3468 for (iRingDst = 3; iRingDst >= 0; iRingDst--)
3469 {
3470 Bs3RegCtxConvertToRingX(&TmpCtxExpected, iRingDst);
3471 TmpCtxExpected.ds = iRingDst ? 0 : TmpCtx.ds;
3472 TmpCtx.es = TmpCtxExpected.es;
3473 iretbuf_SetupFrame(pIretBuf, cbPop, TmpCtxExpected.cs, TmpCtxExpected.rip.u,
3474 TmpCtxExpected.rflags.u32, TmpCtxExpected.ss, TmpCtxExpected.rsp.u);
3475 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3476 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3477 g_usBs3TestStep++;
3478 }
3479
3480 /*
3481 * Check CS.RPL and SS.RPL.
3482 */
3483 for (iRingDst = 3; iRingDst >= 0; iRingDst--)
3484 {
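            /* The ring-0 flavour of the destination SS; the DPL/RPL variants tested
               below are derived from it. */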
3485 uint16_t const uDstSsR0 = (CtxUdExpected.ss & BS3_SEL_RING_SUB_MASK) + BS3_SEL_R0_FIRST;
3486 Bs3MemCpy(&TmpCtxExpected, &CtxUdExpected, sizeof(TmpCtxExpected));
3487 Bs3RegCtxConvertToRingX(&TmpCtxExpected, iRingDst);
3488 for (iRingSrc = 3; iRingSrc >= 0; iRingSrc--)
3489 {
3490 Bs3MemCpy(&TmpCtx, &Ctx, sizeof(TmpCtx));
3491 Bs3RegCtxConvertToRingX(&TmpCtx, iRingSrc);
3492 TmpCtx.es = TmpCtxExpected.es;
3493 TmpCtxExpected.ds = iRingDst != iRingSrc ? 0 : TmpCtx.ds;
3494 for (uRplCs = 0; uRplCs <= 3; uRplCs++)
3495 {
3496 uint16_t const uSrcEs = TmpCtx.es;
3497 uint16_t const uDstCs = (TmpCtxExpected.cs & X86_SEL_MASK_OFF_RPL) | uRplCs;
3498 //Bs3TestPrintf("dst=%d src=%d rplCS=%d\n", iRingDst, iRingSrc, uRplCs);
3499
3500 /* CS.RPL */
3501 iretbuf_SetupFrame(pIretBuf, cbPop, uDstCs, TmpCtxExpected.rip.u, TmpCtxExpected.rflags.u32,
3502 TmpCtxExpected.ss, TmpCtxExpected.rsp.u);
3503 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3504 if (uRplCs == iRingDst && iRingDst >= iRingSrc)
3505 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3506 else
3507 {
3508 if (iRingDst < iRingSrc)
3509 TmpCtx.es = 0;
3510 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstCs & X86_SEL_MASK_OFF_RPL);
3511 TmpCtx.es = uSrcEs;
3512 }
3513 g_usBs3TestStep++;
3514
3515 /* SS.RPL */
3516 if (iRingDst != iRingSrc || BS3_MODE_IS_64BIT_CODE(bTestMode))
3517 {
3518 uint16_t uSavedDstSs = TmpCtxExpected.ss;
3519 for (uRplSs = 0; uRplSs <= 3; uRplSs++)
3520 {
3521 /* SS.DPL (iRingDst == CS.DPL) */
3522 for (uDplSs = 0; uDplSs <= 3; uDplSs++)
3523 {
3524 uint16_t const uDstSs = ((uDplSs << BS3_SEL_RING_SHIFT) | uRplSs) + uDstSsR0;
3525 //Bs3TestPrintf("dst=%d src=%d rplCS=%d rplSS=%d dplSS=%d dst %04x:%08RX64 %08RX32 %04x:%08RX64\n",
3526 // iRingDst, iRingSrc, uRplCs, uRplSs, uDplSs, uDstCs, TmpCtxExpected.rip.u,
3527 // TmpCtxExpected.rflags.u32, uDstSs, TmpCtxExpected.rsp.u);
3528
3529 iretbuf_SetupFrame(pIretBuf, cbPop, uDstCs, TmpCtxExpected.rip.u,
3530 TmpCtxExpected.rflags.u32, uDstSs, TmpCtxExpected.rsp.u);
3531 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3532 if (uRplCs != iRingDst || iRingDst < iRingSrc)
3533 {
3534 if (iRingDst < iRingSrc)
3535 TmpCtx.es = 0;
3536 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstCs & X86_SEL_MASK_OFF_RPL);
3537 }
3538 else if (uRplSs != iRingDst || uDplSs != iRingDst)
3539 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstSs & X86_SEL_MASK_OFF_RPL);
3540 else
3541 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3542 TmpCtx.es = uSrcEs;
3543 g_usBs3TestStep++;
3544 }
3545 }
3546
3547 TmpCtxExpected.ss = uSavedDstSs;
3548 }
3549 }
3550 }
3551 }
3552 }
3553
3554 /*
3555 * Special 64-bit checks.
3556 */
3557 if (BS3_MODE_IS_64BIT_CODE(bTestMode))
3558 {
3559 /* The VM flag is completely ignored. */
3560 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3561 CtxUdExpected.rflags.u32 | X86_EFL_VM, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3562 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3563 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3564 g_usBs3TestStep++;
3565
3566 /* The NT flag can be loaded just fine. */
3567 CtxUdExpected.rflags.u32 |= X86_EFL_NT;
3568 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3569 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3570 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3571 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3572 CtxUdExpected.rflags.u32 &= ~X86_EFL_NT;
3573 g_usBs3TestStep++;
3574
3575 /* However, we'll #GP(0) if it's already set (in RFLAGS) when executing IRET. */
3576 Ctx.rflags.u32 |= X86_EFL_NT;
3577 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3578 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3579 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3580 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3581 g_usBs3TestStep++;
3582
3583 /* The NT flag #GP(0) should trump all other exceptions - pit it against #PF. */
3584 pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED);
3585 if (pbTest != NULL)
3586 {
3587 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, &pbTest[X86_PAGE_SIZE]);
3588 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3589 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3590 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3591 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3592 g_usBs3TestStep++;
3593
3594 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, pIretBuf);
3595 Bs3MemGuardedTestPageFree(pbTest);
3596 }
3597 Ctx.rflags.u32 &= ~X86_EFL_NT;
3598 }
3599}
3600
3601
3602BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_iret)(uint8_t bMode)
3603{
3604 struct
3605 {
3606 uint8_t abExtraStack[4096]; /**< we've got ~30KB of stack, so 4KB for the trap handlers++ is not a problem. */
3607 IRETBUF IRetBuf;
3608 uint8_t abGuard[32];
3609 } uBuf;
3610 size_t cbUnused;
3611
3612 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
3613 bs3CpuBasic2_SetGlobals(bMode);
3614
3615 /*
3616 * Primary instruction form.
3617 */
3618 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3619 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3620 if (BS3_MODE_IS_16BIT_CODE(bMode))
3621 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 2, &uBuf.IRetBuf, "iret");
3622 else if (BS3_MODE_IS_32BIT_CODE(bMode))
3623 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 4, &uBuf.IRetBuf, "iretd");
3624 else
3625 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_rexw, 8, &uBuf.IRetBuf, "o64 iret");
3626
3627 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3628 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3629 - (uintptr_t)uBuf.abExtraStack;
3630 if (cbUnused < 2048)
3631 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 1);
3632
3633 /*
3634 * Secondary variation: opsize prefixed.
3635 */
3636 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3637 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3638 if (BS3_MODE_IS_16BIT_CODE(bMode) && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)
3639 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 4, &uBuf.IRetBuf, "o32 iret");
3640 else if (BS3_MODE_IS_32BIT_CODE(bMode))
3641 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 2, &uBuf.IRetBuf, "o16 iret");
3642 else if (BS3_MODE_IS_64BIT_CODE(bMode))
3643 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 4, &uBuf.IRetBuf, "iretd");
3644 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3645 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3646 - (uintptr_t)uBuf.abExtraStack;
3647 if (cbUnused < 2048)
3648 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 2);
3649
3650 /*
3651 * Third variation: 16-bit in 64-bit mode (truly unlikely)
3652 */
3653 if (BS3_MODE_IS_64BIT_CODE(bMode))
3654 {
3655 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3656 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3657 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 2, &uBuf.IRetBuf, "o16 iret");
3658 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3659 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3660 - (uintptr_t)uBuf.abExtraStack;
3661 if (cbUnused < 2048)
3662 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 3);
3663 }
3664
3665 return 0;
3666}
3667
3668
3669
3670/*********************************************************************************************************************************
3671* Non-far JMP & CALL Tests *
3672*********************************************************************************************************************************/
3673#define PROTO_ALL(a_Template) \
3674 FNBS3FAR a_Template ## _c16, \
3675 a_Template ## _c32, \
3676 a_Template ## _c64
3677PROTO_ALL(bs3CpuBasic2_jmp_jb__ud2);
3678PROTO_ALL(bs3CpuBasic2_jmp_jb_back__ud2);
3679PROTO_ALL(bs3CpuBasic2_jmp_jv__ud2);
3680PROTO_ALL(bs3CpuBasic2_jmp_jv_back__ud2);
3681PROTO_ALL(bs3CpuBasic2_jmp_ind_mem__ud2);
3682PROTO_ALL(bs3CpuBasic2_jmp_ind_xAX__ud2);
3683PROTO_ALL(bs3CpuBasic2_jmp_ind_xDI__ud2);
3684FNBS3FAR bs3CpuBasic2_jmp_ind_r9__ud2_c64;
3685PROTO_ALL(bs3CpuBasic2_call_jv__ud2);
3686PROTO_ALL(bs3CpuBasic2_call_jv_back__ud2);
3687PROTO_ALL(bs3CpuBasic2_call_ind_mem__ud2);
3688PROTO_ALL(bs3CpuBasic2_call_ind_xAX__ud2);
3689PROTO_ALL(bs3CpuBasic2_call_ind_xDI__ud2);
3690FNBS3FAR bs3CpuBasic2_call_ind_r9__ud2_c64;
3691
3692PROTO_ALL(bs3CpuBasic2_jmp_opsize_begin);
3693PROTO_ALL(bs3CpuBasic2_jmp_jb_opsize__ud2);
3694PROTO_ALL(bs3CpuBasic2_jmp_jb_opsize_back__ud2);
3695PROTO_ALL(bs3CpuBasic2_jmp_jv_opsize__ud2);
3696PROTO_ALL(bs3CpuBasic2_jmp_jv_opsize_back__ud2);
3697PROTO_ALL(bs3CpuBasic2_jmp_ind_mem_opsize__ud2);
3698FNBS3FAR bs3CpuBasic2_jmp_ind_mem_opsize__ud2__intel_c64;
3699PROTO_ALL(bs3CpuBasic2_jmp_ind_xAX_opsize__ud2);
3700PROTO_ALL(bs3CpuBasic2_call_jv_opsize__ud2);
3701PROTO_ALL(bs3CpuBasic2_call_jv_opsize_back__ud2);
3702PROTO_ALL(bs3CpuBasic2_call_ind_mem_opsize__ud2);
3703FNBS3FAR bs3CpuBasic2_call_ind_mem_opsize__ud2__intel_c64;
3704PROTO_ALL(bs3CpuBasic2_call_ind_xAX_opsize__ud2);
3705PROTO_ALL(bs3CpuBasic2_jmp_opsize_end);
3706#undef PROTO_ALL
3707
3708FNBS3FAR bs3CpuBasic2_jmptext16_start;
3709
3710FNBS3FAR bs3CpuBasic2_jmp_target_wrap_forward;
3711FNBS3FAR bs3CpuBasic2_jmp_jb_wrap_forward__ud2;
3712FNBS3FAR bs3CpuBasic2_jmp_jb_opsize_wrap_forward__ud2;
3713FNBS3FAR bs3CpuBasic2_jmp_jv16_wrap_forward__ud2;
3714FNBS3FAR bs3CpuBasic2_jmp_jv16_opsize_wrap_forward__ud2;
3715FNBS3FAR bs3CpuBasic2_call_jv16_wrap_forward__ud2;
3716FNBS3FAR bs3CpuBasic2_call_jv16_opsize_wrap_forward__ud2;
3717
3718FNBS3FAR bs3CpuBasic2_jmp_target_wrap_backward;
3719FNBS3FAR bs3CpuBasic2_jmp_jb_wrap_backward__ud2;
3720FNBS3FAR bs3CpuBasic2_jmp_jb_opsize_wrap_backward__ud2;
3721FNBS3FAR bs3CpuBasic2_jmp_jv16_wrap_backward__ud2;
3722FNBS3FAR bs3CpuBasic2_jmp_jv16_opsize_wrap_backward__ud2;
3723FNBS3FAR bs3CpuBasic2_call_jv16_wrap_backward__ud2;
3724FNBS3FAR bs3CpuBasic2_call_jv16_opsize_wrap_backward__ud2;
3725
3726
3727
3728/**
3729 * Entrypoint for non-far JMP & CALL tests.
3730 *
3731 * @returns 0 or BS3TESTDOMODE_SKIPPED.
3732 * @param bMode The CPU mode we're testing.
3733 *
3734 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
3735 * with control registers and such.
3736 */
3737BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_jmp_call)(uint8_t bMode)
3738{
3739 BS3TRAPFRAME TrapCtx;
3740 BS3REGCTX Ctx;
3741 BS3REGCTX CtxExpected;
3742 unsigned iTest;
3743 unsigned const cMaxRecompRuns = g_cBs3ThresholdNativeRecompiler ? g_cBs3ThresholdNativeRecompiler : 1;
3744 unsigned iRecompRun;
3745
3746 /* make sure they're allocated */
3747 Bs3MemZero(&Ctx, sizeof(Ctx));
3748 Bs3MemZero(&CtxExpected, sizeof(Ctx));
3749 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
3750
3751 bs3CpuBasic2_SetGlobals(bMode);
3752
3753 /*
3754 * Create a context.
3755 */
3756 Bs3RegCtxSaveEx(&Ctx, bMode, 768);
3757 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
3758
3759 /*
3760 * 16-bit tests.
3761 *
3762 * When the operand size is 16-bit, relative jumps do 16-bit calculations and
3763 * modify only IP. This means that it is not possible to trigger a segment
3764 * limit #GP(0) when the limit is set to 0xffff.
3765 */
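    /* E.g. a near JMP taken at IP=0xfff0 with a +0x20 displacement wraps around to
       IP=0x0010 instead of running past a 0xffff limit. */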
3766 if (BS3_MODE_IS_16BIT_CODE(bMode))
3767 {
3768 static struct
3769 {
3770 int8_t iWrap;
3771 bool fOpSizePfx;
3772 int8_t iGprIndirect;
3773 bool fCall;
3774 FPFNBS3FAR pfnTest;
3775 }
3776 const s_aTests[] =
3777 {
3778 { 0, false, -1, false, bs3CpuBasic2_jmp_jb__ud2_c16, },
3779 { 0, false, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c16, },
3780 { 0, true, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c16, },
3781 { 0, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c16, },
3782 { 0, false, -1, false, bs3CpuBasic2_jmp_jv__ud2_c16, },
3783 { 0, false, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c16, },
3784 { 0, true, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c16, },
3785 { 0, true, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c16, },
3786 { 0, false, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c16, },
3787 { 0, true, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2_c16, },
3788 { 0, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c16, },
3789 { 0, false, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c16, },
3790 { 0, true, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c16, },
3791 { 0, false, -1, true, bs3CpuBasic2_call_jv__ud2_c16, },
3792 { 0, false, -1, true, bs3CpuBasic2_call_jv_back__ud2_c16, },
3793 { 0, true, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c16, },
3794 { 0, true, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c16, },
3795 { 0, false, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c16, },
3796 { 0, true, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2_c16, },
3797 { 0, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c16, },
3798 { 0, false, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c16, },
3799 { 0, true, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c16, },
3800
3801 { -1, false, -1, false, bs3CpuBasic2_jmp_jb_wrap_backward__ud2, },
3802 { +1, false, -1, false, bs3CpuBasic2_jmp_jb_wrap_forward__ud2, },
3803 { -1, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_wrap_backward__ud2, },
3804 { +1, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_wrap_forward__ud2, },
3805
3806 { -1, false, -1, false, bs3CpuBasic2_jmp_jv16_wrap_backward__ud2, },
3807 { +1, false, -1, false, bs3CpuBasic2_jmp_jv16_wrap_forward__ud2, },
3808 { -1, true, -1, false, bs3CpuBasic2_jmp_jv16_opsize_wrap_backward__ud2, },
3809 { +1, true, -1, false, bs3CpuBasic2_jmp_jv16_opsize_wrap_forward__ud2, },
3810 { -1, false, -1, true, bs3CpuBasic2_call_jv16_wrap_backward__ud2, },
3811 { +1, false, -1, true, bs3CpuBasic2_call_jv16_wrap_forward__ud2, },
3812 { -1, true, -1, true, bs3CpuBasic2_call_jv16_opsize_wrap_backward__ud2, },
3813 { +1, true, -1, true, bs3CpuBasic2_call_jv16_opsize_wrap_forward__ud2, },
3814 };
3815
3816 if (!BS3_MODE_IS_RM_OR_V86(bMode))
3817 Bs3SelSetup16BitCode(&Bs3GdteSpare03, Bs3SelLnkPtrToFlat(bs3CpuBasic2_jmptext16_start), 0);
3818
3819 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
3820 {
3821 uint64_t uGprSaved;
3822 if (s_aTests[iTest].iWrap == 0)
3823 {
3824 uint8_t const BS3_FAR *fpbCode;
3825 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
3826 fpbCode = (uint8_t const BS3_FAR *)BS3_FP_MAKE(Ctx.cs, Ctx.rip.u16);
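                /* The byte just before each test routine's entry point holds the signed
                   distance from the entry point to the UD2 we expect to stop at. */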
3827 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
3828 }
3829 else
3830 {
3831 if (BS3_MODE_IS_RM_OR_V86(bMode))
3832 Ctx.cs = BS3_FP_SEG(s_aTests[iTest].pfnTest);
3833 else
3834 Ctx.cs = BS3_SEL_SPARE_03;
3835 Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
3836 if (s_aTests[iTest].fOpSizePfx)
3837 CtxExpected.rip.u = Ctx.rip.u;
3838 else if (s_aTests[iTest].iWrap < 0)
3839 CtxExpected.rip.u = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward);
3840 else
3841 CtxExpected.rip.u = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_forward);
3842 }
3843 CtxExpected.cs = Ctx.cs;
3844 if (s_aTests[iTest].iGprIndirect >= 0)
3845 {
3846 uGprSaved = (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u;
3847 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u
3848 = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = CtxExpected.rip.u;
3849 }
3850 CtxExpected.rsp.u = Ctx.rsp.u;
3851 if (s_aTests[iTest].fCall && (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx))
3852 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 4 : 2;
3853 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u);
3854
3855 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
3856 {
3857 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3858 if (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx)
3859 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
3860 else
3861 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3862 }
3863 g_usBs3TestStep++;
3864
3865 /* Again single stepping: */
3866 //Bs3TestPrintf("stepping...\n");
3867 Ctx.rflags.u16 |= X86_EFL_TF;
3868 CtxExpected.rflags.u16 = Ctx.rflags.u16;
3869 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
3870 {
3871 Bs3RegSetDr6(0);
3872 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3873 if (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx)
3874 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
3875 else
3876 {
3877 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3878 bs3CpuBasic2_CheckDr6InitVal();
3879 }
3880 }
3881 Ctx.rflags.u16 &= ~X86_EFL_TF;
3882 CtxExpected.rflags.u16 = Ctx.rflags.u16;
3883 g_usBs3TestStep++;
3884
3885 if (s_aTests[iTest].iGprIndirect >= 0)
3886 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = uGprSaved;
3887 }
3888
3889 /* Limit the wraparound CS segment to exclude bs3CpuBasic2_jmp_target_wrap_backward
3890 and run the backward wrapping tests. */
3891 if (!BS3_MODE_IS_RM_OR_V86(bMode))
3892 {
3893 Bs3GdteSpare03.Gen.u16LimitLow = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward) - 1;
3894 CtxExpected.cs = Ctx.cs = BS3_SEL_SPARE_03;
3895 CtxExpected.rsp.u = Ctx.rsp.u;
3896 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
3897 if (s_aTests[iTest].iWrap < 0)
3898 {
3899 CtxExpected.rip.u = Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
3900 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 v1\n", Ctx.cs, Ctx.rip.u);
3901 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
3902 {
3903 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3904 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3905 }
3906 g_usBs3TestStep++;
3907 }
3908
3909 /* Do another round where we put the limit in the middle of the UD2
3910 instruction we're jumping to: */
3911 Bs3GdteSpare03.Gen.u16LimitLow = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward);
3912 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
3913 if (s_aTests[iTest].iWrap < 0)
3914 {
3915 Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
3916 if (s_aTests[iTest].fOpSizePfx)
3917 CtxExpected.rip.u = Ctx.rip.u;
3918 else
3919 CtxExpected.rip.u = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward);
3920 CtxExpected.rsp.u = Ctx.rsp.u;
3921 if (s_aTests[iTest].fCall && (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx))
3922 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 4 : 2;
3923 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 v2\n", Ctx.cs, Ctx.rip.u);
3924 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
3925 {
3926 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3927 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3928 }
3929 g_usBs3TestStep++;
3930 }
3931 }
3932
3933 }
3934 /*
3935 * 32-bit & 64-bit tests.
3936 *
3937 * When the opsize prefix is applied here, IP is updated and bits 63:16 are
3938 * cleared. However, in 64-bit mode Intel ignores the opsize prefix,
3939 * whereas AMD honours it and it works as you would expect.
3940 */
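    /* Consequently the 64-bit entries in the table below come in two flavours: the
       '_c64'/'intel' ones expect the prefix to be ignored, while the '_c32'-style ones
       expect AMD behaviour where RIP bits 63:16 really are cleared. */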
3941 else
3942 {
3943 static struct
3944 {
3945 uint8_t cBits;
3946 bool fOpSizePfx;
3947 bool fIgnPfx;
3948 int8_t iGprIndirect;
3949 bool fCall;
3950 FPFNBS3FAR pfnTest;
3951 }
3952 const s_aTests[] =
3953 {
3954 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jb__ud2_c32, },
3955 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c32, },
3956 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c32, },
3957 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c32, },
3958 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jv__ud2_c32, },
3959 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c32, },
3960 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c32, },
3961 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c32, },
3962 { 32, false, false, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c32, },
3963 { 32, true, false, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2_c32, },
3964 { 32, false, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c32, },
3965 { 32, false, false, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c32, },
3966 { 32, true, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c32, },
3967 { 32, false, false, -1, true, bs3CpuBasic2_call_jv__ud2_c32, },
3968 { 32, false, false, -1, true, bs3CpuBasic2_call_jv_back__ud2_c32, },
3969 { 32, true, false, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c32, },
3970 { 32, true, false, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c32, },
3971 { 32, false, false, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c32, },
3972 { 32, true, false, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2_c32, },
3973 { 32, false, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c32, },
3974 { 32, false, false, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c32, },
3975 { 32, true, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c32, },
3976 /* 64bit/Intel: Use the _c64 tests, which are written to ignore the o16 prefix. */
3977 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jb__ud2_c64, },
3978 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c64, },
3979 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c64, },
3980 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c64, },
3981 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jv__ud2_c64, },
3982 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c64, },
3983 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c64, },
3984 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c64, },
3985 { 64, false, true, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c64, },
3986 { 64, true, true, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2__intel_c64, },
3987 { 64, false, true, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c64, },
3988 { 64, false, true, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c64, },
3989 { 64, false, true, X86_GREG_x9, false, bs3CpuBasic2_jmp_ind_r9__ud2_c64, },
3990 { 64, true, true, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c64, }, /* no intel version needed */
3991 { 64, false, true, -1, true, bs3CpuBasic2_call_jv__ud2_c64, },
3992 { 64, false, true, -1, true, bs3CpuBasic2_call_jv_back__ud2_c64, },
3993 { 64, true, true, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c64, },
3994 { 64, true, true, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c64, },
3995 { 64, false, true, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c64, },
3996 { 64, true, true, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2__intel_c64,},
3997 { 64, false, true, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c64, },
3998 { 64, false, true, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c64, },
3999 { 64, false, true, X86_GREG_x9, true, bs3CpuBasic2_call_ind_r9__ud2_c64, },
4000 { 64, true, true, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c64, }, /* no intel version needed */
4001 /* 64bit/AMD: Use the _c32 tests. */
4002 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jb__ud2_c32, },
4003 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c32, },
4004 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c32, },
4005 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c32, },
4006 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jv__ud2_c32, },
4007 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c32, },
4008 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c32, },
4009 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c32, },
4010 { 64, false, false, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c64, }, /* using c64 here */
4011 { 64, true, false, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2_c64, }, /* ditto */
4012 { 64, false, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c64, }, /* ditto */
4013 { 64, false, false, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c64, }, /* ditto */
4014 { 64, false, false, X86_GREG_x9, false, bs3CpuBasic2_jmp_ind_r9__ud2_c64, }, /* ditto */
4015 { 64, true, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c64, }, /* ditto */
4016 { 64, false, false, -1, true, bs3CpuBasic2_call_jv__ud2_c32, }, /* using c32 again */
4017 { 64, false, false, -1, true, bs3CpuBasic2_call_jv_back__ud2_c32, },
4018 { 64, true, false, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c32, },
4019 { 64, true, false, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c32, },
4020 { 64, false, false, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c64, }, /* using c64 here */
4021 { 64, true, false, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2_c64, }, /* ditto */
4022 { 64, false, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c64, }, /* ditto */
4023 { 64, false, false, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c64, }, /* ditto */
4024 { 64, false, false, X86_GREG_x9, true, bs3CpuBasic2_call_ind_r9__ud2_c64, }, /* ditto */
4025 { 64, true, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c64, }, /* ditto */
4026 };
4027 uint8_t const cBits = BS3_MODE_IS_64BIT_CODE(bMode) ? 64 : 32;
4028 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
4029 bool const fIgnPfx = cBits == 64 && enmCpuVendor == BS3CPUVENDOR_INTEL; /** @todo what does VIA do? */
4030
4031 /* Prepare a copy of the UD2 instructions in low memory for opsize prefixed tests. */
4032 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_jmp_opsize_begin_c32);
4033 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_jmp_opsize_end_c64) - offLow;
4034 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4035 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4036 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4037 Bs3TestFailedF("Opsize overridden jumps are out of place: %#x LB %#x\n", offLow, cbLow);
4038 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
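        /* When the o16 prefix is honoured, the truncated IP lands in the first 64KB of
           the address space, so we plant the UD2 bytes down there (via pbLow) and leave
           icebp bytes at the original code location; ending up on the icebp would mean
           the prefix was ignored. */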
4039 if (!fIgnPfx)
4040 {
4041 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4042 if (s_aTests[iTest].fOpSizePfx && s_aTests[iTest].cBits == cBits && s_aTests[iTest].fIgnPfx == fIgnPfx)
4043 {
4044 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4045 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4046 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4047 pbCode16[offUd] = 0xf1; /* replace original ud2 with icebp */
4048 pbCode16[offUd + 1] = 0xf1;
4049 pbLow[offUd] = 0x0f; /* plant ud2 in low memory */
4050 pbLow[offUd + 1] = 0x0b;
4051 }
4052 }
4053
4054 /* Run the tests. */
4055 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4056 {
4057 if (s_aTests[iTest].cBits == cBits && s_aTests[iTest].fIgnPfx == fIgnPfx)
4058 {
4059 uint64_t uGprSaved;
4060 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4061 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
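                /* As in the 16-bit tests, the byte before the entry point gives the
                   signed offset to the expected UD2. */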
4062 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4063 if (s_aTests[iTest].iGprIndirect >= 0)
4064 {
4065 uGprSaved = (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u;
4066 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u
4067 = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = CtxExpected.rip.u;
4068 }
4069 if (s_aTests[iTest].fOpSizePfx && !fIgnPfx)
4070 CtxExpected.rip.u &= UINT16_MAX;
4071 CtxExpected.rsp.u = Ctx.rsp.u;
4072 if (s_aTests[iTest].fCall)
4073 CtxExpected.rsp.u -= s_aTests[iTest].cBits == 64 ? 8
4074 : !s_aTests[iTest].fOpSizePfx ? 4 : 2;
4075
4076 //Bs3TestPrintf("cs:rip=%04RX16:%08RX64\n", Ctx.cs, Ctx.rip.u);
4077
4078 if (BS3_MODE_IS_16BIT_SYS(bMode))
4079 g_uBs3TrapEipHint = s_aTests[iTest].fOpSizePfx ? 0 : Ctx.rip.u32;
4080 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
4081 {
4082 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4083 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4084 }
4085 g_usBs3TestStep++;
4086
4087 /* Again single stepping: */
4088 //Bs3TestPrintf("stepping...\n");
4089 Ctx.rflags.u16 |= X86_EFL_TF;
4090 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4091 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
4092 {
4093 Bs3RegSetDr6(0);
4094 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4095 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4096 }
4097 Ctx.rflags.u16 &= ~X86_EFL_TF;
4098 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4099 g_usBs3TestStep++;
4100
4101 if (s_aTests[iTest].iGprIndirect >= 0)
4102 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = uGprSaved;
4103 }
4104 }
4105
4106 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4107 }
4108
4109 return 0;
4110}
4111
4112
4113/*********************************************************************************************************************************
4114* FAR JMP & FAR CALL Tests *
4115*********************************************************************************************************************************/
4116#define PROTO_ALL(a_Template) \
4117 FNBS3FAR a_Template ## _c16, \
4118 a_Template ## _c32, \
4119 a_Template ## _c64
4120PROTO_ALL(bs3CpuBasic2_far_jmp_call_opsize_begin);
4121
4122FNBS3FAR bs3CpuBasic2_jmpf_ptr_rm__ud2_c16;
4123PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r0__ud2);
4124PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r1__ud2);
4125PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r2__ud2);
4126PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r3__ud2);
4127PROTO_ALL(bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2);
4128PROTO_ALL(bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2);
4129PROTO_ALL(bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2);
4130
4131FNBS3FAR bs3CpuBasic2_callf_ptr_rm__ud2_c16;
4132PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r0__ud2);
4133PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r1__ud2);
4134PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r2__ud2);
4135PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r3__ud2);
4136PROTO_ALL(bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2);
4137PROTO_ALL(bs3CpuBasic2_callf_ptr_r0_cs64__ud2);
4138PROTO_ALL(bs3CpuBasic2_callf_ptr_r0_cs16l__ud2);
4139
4140FNBS3FAR bs3CpuBasic2_jmpf_mem_rm__ud2_c16;
4141PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r0__ud2);
4142PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r1__ud2);
4143PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r2__ud2);
4144PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r3__ud2);
4145PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs16__ud2);
4146PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs32__ud2);
4147PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs64__ud2);
4148PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2);
4149
4150FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r0__ud2_intel_c64;
4151FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r1__ud2_intel_c64;
4152FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r2__ud2_intel_c64;
4153FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r3__ud2_intel_c64;
4154FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_intel_c64;
4155FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_intel_c64;
4156FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_intel_c64;
4157FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_intel_c64;
4158
4159FNBS3FAR bs3CpuBasic2_callf_mem_rm__ud2_c16;
4160PROTO_ALL(bs3CpuBasic2_callf_mem_same_r0__ud2);
4161PROTO_ALL(bs3CpuBasic2_callf_mem_same_r1__ud2);
4162PROTO_ALL(bs3CpuBasic2_callf_mem_same_r2__ud2);
4163PROTO_ALL(bs3CpuBasic2_callf_mem_same_r3__ud2);
4164PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs16__ud2);
4165PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs32__ud2);
4166PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs64__ud2);
4167PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs16l__ud2);
4168
4169FNBS3FAR bs3CpuBasic2_callf_mem_same_r0__ud2_intel_c64;
4170FNBS3FAR bs3CpuBasic2_callf_mem_same_r1__ud2_intel_c64;
4171FNBS3FAR bs3CpuBasic2_callf_mem_same_r2__ud2_intel_c64;
4172FNBS3FAR bs3CpuBasic2_callf_mem_same_r3__ud2_intel_c64;
4173FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs16__ud2_intel_c64;
4174FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs32__ud2_intel_c64;
4175FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs64__ud2_intel_c64;
4176FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs16l__ud2_intel_c64;
4177
4178PROTO_ALL(bs3CpuBasic2_far_jmp_call_opsize_end);
4179#undef PROTO_ALL
4180
4181
4182
4183/**
4184 * Entrypoint for FAR JMP & FAR CALL tests.
4185 *
4186 * @returns 0 or BS3TESTDOMODE_SKIPPED.
4187 * @param bMode The CPU mode we're testing.
4188 *
4189 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
4190 * with control registers and such.
4191 */
4192BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_far_jmp_call)(uint8_t bMode)
4193{
4194 BS3TRAPFRAME TrapCtx;
4195 BS3REGCTX Ctx;
4196 BS3REGCTX CtxExpected;
4197 unsigned iTest;
4198
4199 /* make sure they're allocated */
4200 Bs3MemZero(&Ctx, sizeof(Ctx));
4201 Bs3MemZero(&CtxExpected, sizeof(Ctx));
4202 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
4203
4204 bs3CpuBasic2_SetGlobals(bMode);
4205
4206 /*
4207 * Create a context.
4208 */
4209 Bs3RegCtxSaveEx(&Ctx, bMode, 768);
4210 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
4211
4212 if (Ctx.rax.u8 == 0 || Ctx.rax.u8 == 0xff) /* for salc & the 64-bit detection */
4213 CtxExpected.rax.u8 = Ctx.rax.u8 = 0x42;
4214
4215 /*
4216 * Set up spare selectors.
4217 */
4218 Bs3GdteSpare00 = Bs3Gdte_CODE16;
4219 Bs3GdteSpare00.Gen.u1Long = 1;
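/* Bs3GdteSpare00 is now a copy of the 16-bit code descriptor with the L bit set:
   a 64-bit CS in long mode, an ordinary 16-bit CS everywhere else.  This is what
   the 'cs16l' test variants below target. */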
4220
4221 /*
4222 * 16-bit tests.
4223 */
4224 if (BS3_MODE_IS_16BIT_CODE(bMode))
4225 {
4226 static struct
4227 {
4228 bool fRmOrV86;
4229 bool fCall;
4230 uint16_t uDstSel;
4231 uint8_t uDstBits;
4232 bool fOpSizePfx;
4233 FPFNBS3FAR pfnTest;
4234 }
4235 const s_aTests[] =
4236 {
4237 { true, false, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_jmpf_ptr_rm__ud2_c16, },
4238 { false, false, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_jmpf_ptr_same_r0__ud2_c16, },
4239 { false, false, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_jmpf_ptr_same_r1__ud2_c16, },
4240 { false, false, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_jmpf_ptr_same_r2__ud2_c16, },
4241 { false, false, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_jmpf_ptr_same_r3__ud2_c16, },
4242 { false, false, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2_c16, },
4243 { false, false, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4244 { false, false, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4245
4246 { true, true, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_callf_ptr_rm__ud2_c16, },
4247 { false, true, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_callf_ptr_same_r0__ud2_c16, },
4248 { false, true, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_callf_ptr_same_r1__ud2_c16, },
4249 { false, true, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_callf_ptr_same_r2__ud2_c16, },
4250 { false, true, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_callf_ptr_same_r3__ud2_c16, },
4251 { false, true, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2_c16, },
4252 { false, true, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_callf_ptr_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4253 { false, true, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_callf_ptr_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4254
4255 { true, false, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_jmpf_mem_rm__ud2_c16, },
4256 { false, false, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_jmpf_mem_same_r0__ud2_c16, },
4257 { false, false, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_jmpf_mem_same_r1__ud2_c16, },
4258 { false, false, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_jmpf_mem_same_r2__ud2_c16, },
4259 { false, false, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_jmpf_mem_same_r3__ud2_c16, },
4260 { false, false, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_c16, },
4261 { false, false, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_c16, },
4262 { false, false, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4263 { false, false, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4264
4265 { true, true, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_callf_mem_rm__ud2_c16, },
4266 { false, true, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_callf_mem_same_r0__ud2_c16, },
4267 { false, true, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_callf_mem_same_r1__ud2_c16, },
4268 { false, true, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_callf_mem_same_r2__ud2_c16, },
4269 { false, true, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_callf_mem_same_r3__ud2_c16, },
4270 { false, true, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_callf_mem_r0_cs16__ud2_c16, },
4271 { false, true, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_callf_mem_r0_cs32__ud2_c16, },
4272 { false, true, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_callf_mem_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4273 { false, true, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4274 };
4275 bool const fRmOrV86 = BS3_MODE_IS_RM_OR_V86(bMode);
4276
4277 /* Prepare a copy of the SALC & UD2 instructions in low memory for opsize
4278 prefixed tests jumping to BS3_SEL_SPARE_00 when in 64-bit mode, because
4279 it'll be a 64-bit CS then with base=0 instead of a CS16 with base=0x10000. */
4280 if (BS3_MODE_IS_64BIT_SYS(bMode))
4281 {
4282 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_begin_c16);
4283 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_end_c16) - offLow;
4284 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4285 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4286 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4287 Bs3TestFailedF("Opsize overridden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
4288 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
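/* The 64-bit destination snippets end with 'salc; ud2'.  SALC is an invalid
   opcode in 64-bit mode but sets AL from CF otherwise, which is what the rip/rax
   expectation adjustments further down rely on to tell whether the destination
   really ended up being a 64-bit CS. */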
4289 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4290 if (s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00 && s_aTests[iTest].uDstBits == 64)
4291 {
4292 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4293 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4294 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4295 pbLow[offUd - 1] = 0xd6; /* plant salc + ud2 in low memory */
4296 pbLow[offUd] = 0x0f;
4297 pbLow[offUd + 1] = 0x0b;
4298 }
4299 }
4300
4301 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4302 if (s_aTests[iTest].fRmOrV86 == fRmOrV86)
4303 {
4304 uint64_t const uSavedRsp = Ctx.rsp.u;
4305 bool const fGp = (s_aTests[iTest].uDstSel & X86_SEL_RPL) != 0;
4306 uint8_t const BS3_FAR *fpbCode;
4307
4308 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
4309 fpbCode = (uint8_t const BS3_FAR *)BS3_FP_MAKE(Ctx.cs, Ctx.rip.u16);
4310 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4311 if ( s_aTests[iTest].uDstBits == 32
4312 || ( s_aTests[iTest].uDstBits == 64
4313 && !BS3_MODE_IS_16BIT_SYS(bMode)
4314 && s_aTests[iTest].uDstSel != BS3_SEL_SPARE_00))
4315 CtxExpected.rip.u += BS3_ADDR_BS3TEXT16;
4316 if (s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00 && s_aTests[iTest].uDstBits == 64 && BS3_MODE_IS_64BIT_SYS(bMode))
4317 CtxExpected.rip.u &= UINT16_MAX;
4318 CtxExpected.cs = s_aTests[iTest].uDstSel;
4319 if (fGp)
4320 {
4321 CtxExpected.rip.u = Ctx.rip.u;
4322 CtxExpected.cs = Ctx.cs;
4323 }
4324 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4325 CtxExpected.rsp.u = Ctx.rsp.u;
4326 if (s_aTests[iTest].fCall && !fGp)
4327 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 8 : 4;
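/* (A far call from 16-bit code pushes CS:IP, i.e. 4 bytes, or 8 bytes with the
   operand size prefix.) */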
4328 if (s_aTests[iTest].uDstBits == 64 && !fGp)
4329 {
4330 if (BS3_MODE_IS_64BIT_SYS(bMode))
4331 CtxExpected.rip.u -= 1;
4332 else
4333 CtxExpected.rax.u8 = CtxExpected.rflags.u & X86_EFL_CF ? 0xff : 0x00;
4334 }
4335 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4336 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4337 if (!fGp)
4338 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4339 else
4340 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4341 Ctx.rsp.u = uSavedRsp;
4342 g_usBs3TestStep++;
4343
4344 /* Again single stepping: */
4345 //Bs3TestPrintf("stepping...\n");
4346 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4347 Ctx.rflags.u16 |= X86_EFL_TF;
4348 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4349 CtxExpected.rax.u = Ctx.rax.u;
4350 if (s_aTests[iTest].uDstBits == 64 && !fGp && !BS3_MODE_IS_64BIT_SYS(bMode))
4351 CtxExpected.rip.u -= 1;
4352 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4353 if (!fGp)
4354 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4355 else
4356 {
4357 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4358 bs3CpuBasic2_CheckDr6InitVal();
4359 }
4360 Ctx.rflags.u16 &= ~X86_EFL_TF;
4361 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4362 Ctx.rsp.u = uSavedRsp;
4363 g_usBs3TestStep++;
4364 }
4365 }
4366 /*
4367 * 32-bit tests.
4368 */
4369 else if (BS3_MODE_IS_32BIT_CODE(bMode))
4370 {
4371 static struct
4372 {
4373 bool fCall;
4374 uint16_t uDstSel;
4375 uint8_t uDstBits;
4376 bool fOpSizePfx;
4377 FPFNBS3FAR pfnTest;
4378 }
4379 const s_aTests[] =
4380 {
4381 { false, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_jmpf_ptr_same_r0__ud2_c32, },
4382 { false, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_jmpf_ptr_same_r1__ud2_c32, },
4383 { false, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_jmpf_ptr_same_r2__ud2_c32, },
4384 { false, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_jmpf_ptr_same_r3__ud2_c32, },
4385 { false, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2_c32, },
4386 { false, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4387 { false, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4388
4389 { true, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_callf_ptr_same_r0__ud2_c32, },
4390 { true, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_callf_ptr_same_r1__ud2_c32, },
4391 { true, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_callf_ptr_same_r2__ud2_c32, },
4392 { true, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_callf_ptr_same_r3__ud2_c32, },
4393 { true, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2_c32, },
4394 { true, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_callf_ptr_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4395 { true, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_callf_ptr_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4396
4397 { false, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_jmpf_mem_same_r0__ud2_c32, },
4398 { false, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_jmpf_mem_same_r1__ud2_c32, },
4399 { false, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_jmpf_mem_same_r2__ud2_c32, },
4400 { false, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_jmpf_mem_same_r3__ud2_c32, },
4401 { false, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_c32, },
4402 { false, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_c32, },
4403 { false, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4404 { false, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4405
4406 { true, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_callf_mem_same_r0__ud2_c32, },
4407 { true, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_callf_mem_same_r1__ud2_c32, },
4408 { true, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_callf_mem_same_r2__ud2_c32, },
4409 { true, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_callf_mem_same_r3__ud2_c32, },
4410 { true, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_callf_mem_r0_cs16__ud2_c32, },
4411 { true, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_callf_mem_r0_cs32__ud2_c32, },
4412 { true, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_callf_mem_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4413 { true, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4414 };
4415
4416 /* Prepare a copy of the SALC & UD2 instructions in low memory for opsize
4417 prefixed tests jumping to BS3_SEL_SPARE_00 when in 64-bit mode, because
4418 it'll be a 64-bit CS then with base=0 instead of a CS16 with base=0x10000. */
4419 if (BS3_MODE_IS_64BIT_SYS(bMode))
4420 {
4421 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_begin_c32);
4422 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_end_c32) - offLow;
4423 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4424 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4425 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4426 Bs3TestFailedF("Opsize overridden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
4427 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4428 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4429 if (s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00 && s_aTests[iTest].uDstBits == 64)
4430 {
4431 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4432 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4433 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4434 pbLow[offUd - 1] = 0xd6; /* plant salc + ud2 in low memory */
4435 pbLow[offUd] = 0x0f;
4436 pbLow[offUd + 1] = 0x0b;
4437 }
4438 }
4439 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4440 {
4441 uint64_t const uSavedRsp = Ctx.rsp.u;
4442 bool const fGp = (s_aTests[iTest].uDstSel & X86_SEL_RPL) != 0;
4443 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4444
4445 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4446 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4447 if ( s_aTests[iTest].uDstBits == 16
4448 || ( s_aTests[iTest].uDstBits == 64
4449 && ( BS3_MODE_IS_16BIT_SYS(bMode))
4450 || s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00))
4451 CtxExpected.rip.u &= UINT16_MAX;
4452 CtxExpected.cs = s_aTests[iTest].uDstSel;
4453 if (fGp)
4454 {
4455 CtxExpected.rip.u = Ctx.rip.u;
4456 CtxExpected.cs = Ctx.cs;
4457 }
4458 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4459 CtxExpected.rsp.u = Ctx.rsp.u;
4460 if (s_aTests[iTest].fCall && !fGp)
4461 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 4 : 8;
4462 if (s_aTests[iTest].uDstBits == 64 && !fGp)
4463 {
4464 if (BS3_MODE_IS_64BIT_SYS(bMode))
4465 CtxExpected.rip.u -= 1;
4466 else
4467 CtxExpected.rax.u8 = CtxExpected.rflags.u & X86_EFL_CF ? 0xff : 0x00;
4468 }
4469 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4470 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4471 if (!fGp)
4472 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4473 else
4474 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4475 Ctx.rsp.u = uSavedRsp;
4476 g_usBs3TestStep++;
4477
4478 /* Again single stepping: */
4479 //Bs3TestPrintf("stepping...\n");
4480 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4481 Ctx.rflags.u16 |= X86_EFL_TF;
4482 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4483 CtxExpected.rax.u = Ctx.rax.u;
4484 if (s_aTests[iTest].uDstBits == 64 && !fGp && !BS3_MODE_IS_64BIT_SYS(bMode))
4485 CtxExpected.rip.u -= 1;
4486 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4487 if (!fGp)
4488 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4489 else
4490 {
4491 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4492 bs3CpuBasic2_CheckDr6InitVal();
4493 }
4494 Ctx.rflags.u16 &= ~X86_EFL_TF;
4495 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4496 Ctx.rsp.u = uSavedRsp;
4497 g_usBs3TestStep++;
4498 }
4499 }
4500 /*
4501 * 64-bit tests.
4502 */
4503 else if (BS3_MODE_IS_64BIT_CODE(bMode))
4504 {
4505 static struct
4506 {
4507 bool fInvalid;
4508 bool fCall;
4509 uint16_t uDstSel;
4510 uint8_t uDstBits;
4511 uint8_t fOpSizePfx; /**< 0: none, 1: 066h, 2: REX.W, 3: 066h REX.W */
4512 int8_t fFix64OpSize;
4513 FPFNBS3FAR pfnTest;
4514 }
4515 const s_aTests[] =
4516 {
4517 /* invalid opcodes: */
4518 { true, false, BS3_SEL_R0_CS32, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r0__ud2_c32, },
4519 { true, false, BS3_SEL_R1_CS32 | 1, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r1__ud2_c32, },
4520 { true, false, BS3_SEL_R2_CS32 | 2, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r2__ud2_c32, },
4521 { true, false, BS3_SEL_R3_CS32 | 3, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r3__ud2_c32, },
4522 { true, false, BS3_SEL_R0_CS16, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2_c32, },
4523 { true, false, BS3_SEL_R0_CS64, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2_c32, },
4524 { true, false, BS3_SEL_SPARE_00, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2_c32, },
4525
4526 { true, true, BS3_SEL_R0_CS32, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r0__ud2_c32, },
4527 { true, true, BS3_SEL_R1_CS32 | 1, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r1__ud2_c32, },
4528 { true, true, BS3_SEL_R2_CS32 | 2, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r2__ud2_c32, },
4529 { true, true, BS3_SEL_R3_CS32 | 3, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r3__ud2_c32, },
4530 { true, true, BS3_SEL_R0_CS16, 64, 0, -1, bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2_c32, },
4531 { true, true, BS3_SEL_R0_CS64, 64, 0, -1, bs3CpuBasic2_callf_ptr_r0_cs64__ud2_c32, },
4532 { true, true, BS3_SEL_SPARE_00, 64, 0, -1, bs3CpuBasic2_callf_ptr_r0_cs16l__ud2_c32, },
4533
4534 { false, false, BS3_SEL_R0_CS64, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r0__ud2_c64, },
4535 { false, false, BS3_SEL_R1_CS64 | 1, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r1__ud2_c64, },
4536 { false, false, BS3_SEL_R2_CS64 | 2, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r2__ud2_c64, },
4537 { false, false, BS3_SEL_R3_CS64 | 3, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r3__ud2_c64, },
4538 { false, false, BS3_SEL_R0_CS16, 16, 1, false, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_c64, },
4539 { false, false, BS3_SEL_R0_CS32, 32, 0, false, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_c64, },
4540 { false, false, BS3_SEL_R0_CS64, 64, 0, false, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_c64, }, /* 16-bit CS, except in LM. */
4541 { false, false, BS3_SEL_SPARE_00, 64, 0, false, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_c64, }, /* 16-bit CS, except in LM. */
4542
4543 { false, false, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_jmpf_mem_same_r0__ud2_intel_c64, },
4544 { false, false, BS3_SEL_R1_CS64 | 1, 64, 2, true, bs3CpuBasic2_jmpf_mem_same_r1__ud2_intel_c64, },
4545 { false, false, BS3_SEL_R2_CS64 | 2, 64, 0, true, bs3CpuBasic2_jmpf_mem_same_r2__ud2_intel_c64, },
4546 { false, false, BS3_SEL_R3_CS64 | 3, 64, 2, true, bs3CpuBasic2_jmpf_mem_same_r3__ud2_intel_c64, },
4547 { false, false, BS3_SEL_R0_CS16, 16, 1, true, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_intel_c64, },
4548 { false, false, BS3_SEL_R0_CS32, 32, 0, true, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_intel_c64, },
4549 { false, false, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4550 { false, false, BS3_SEL_SPARE_00, 64, 0, true, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4551
4552 { false, true, BS3_SEL_R0_CS64, 64, 2, false, bs3CpuBasic2_callf_mem_same_r0__ud2_c64, },
4553 { false, true, BS3_SEL_R1_CS64 | 1, 64, 2, false, bs3CpuBasic2_callf_mem_same_r1__ud2_c64, },
4554 { false, true, BS3_SEL_R2_CS64 | 2, 64, 0, false, bs3CpuBasic2_callf_mem_same_r2__ud2_c64, },
4555 { false, true, BS3_SEL_R3_CS64 | 3, 64, 2, false, bs3CpuBasic2_callf_mem_same_r3__ud2_c64, },
4556 { false, true, BS3_SEL_R0_CS16, 16, 1, false, bs3CpuBasic2_callf_mem_r0_cs16__ud2_c64, },
4557 { false, true, BS3_SEL_R0_CS32, 32, 2, false, bs3CpuBasic2_callf_mem_r0_cs32__ud2_c64, },
4558 { false, true, BS3_SEL_R0_CS64, 64, 0, false, bs3CpuBasic2_callf_mem_r0_cs64__ud2_c64, }, /* 16-bit CS, except in LM. */
4559 { false, true, BS3_SEL_SPARE_00, 64, 0, false, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_c64, }, /* 16-bit CS, except in LM. */
4560
4561 { false, true, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_callf_mem_same_r0__ud2_intel_c64, },
4562 { false, true, BS3_SEL_R1_CS64 | 1, 64, 2, true, bs3CpuBasic2_callf_mem_same_r1__ud2_intel_c64, },
4563 { false, true, BS3_SEL_R2_CS64 | 2, 64, 0, true, bs3CpuBasic2_callf_mem_same_r2__ud2_intel_c64, },
4564 { false, true, BS3_SEL_R3_CS64 | 3, 64, 2, true, bs3CpuBasic2_callf_mem_same_r3__ud2_intel_c64, },
4565 { false, true, BS3_SEL_R0_CS16, 16, 1, true, bs3CpuBasic2_callf_mem_r0_cs16__ud2_intel_c64, },
4566 { false, true, BS3_SEL_R0_CS32, 32, 0, true, bs3CpuBasic2_callf_mem_r0_cs32__ud2_intel_c64, },
4567 { false, true, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_callf_mem_r0_cs64__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4568 { false, true, BS3_SEL_SPARE_00, 64, 0, true, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4569 };
4570 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
4571 bool const fFix64OpSize = enmCpuVendor == BS3CPUVENDOR_INTEL; /** @todo what does VIA do? */
4572
4573 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4574 {
4575 uint64_t const uSavedRsp = Ctx.rsp.u;
4576 bool const fUd = s_aTests[iTest].fInvalid;
4577 bool const fGp = (s_aTests[iTest].uDstSel & X86_SEL_RPL) != 0;
4578 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4579
4580 if (s_aTests[iTest].fFix64OpSize != fFix64OpSize && s_aTests[iTest].fFix64OpSize >= 0)
4581 continue;
4582
4583 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4584 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4585 CtxExpected.cs = s_aTests[iTest].uDstSel;
4586 if (s_aTests[iTest].uDstBits == 16)
4587 CtxExpected.rip.u &= UINT16_MAX;
4588 else if (s_aTests[iTest].uDstBits == 64 && fFix64OpSize && s_aTests[iTest].uDstSel != BS3_SEL_SPARE_00)
4589 CtxExpected.rip.u |= UINT64_C(0xfffff00000000000);
4590
4591 if (fGp || fUd)
4592 {
4593 CtxExpected.rip.u = Ctx.rip.u;
4594 CtxExpected.cs = Ctx.cs;
4595 }
4596 CtxExpected.rsp.u = Ctx.rsp.u;
4597 if (s_aTests[iTest].fCall && !fGp && !fUd)
4598 {
4599 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx == 0 ? 8
4600 : s_aTests[iTest].fOpSizePfx == 1 ? 4 : 16;
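/* (A far call in 64-bit code pushes CS+EIP = 8 bytes by default, 4 bytes with a
   066h prefix and 16 bytes with REX.W, matching the fOpSizePfx encoding noted in
   the table above.) */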
4601 //Bs3TestPrintf("cs:rsp=%04RX16:%04RX64 -> %04RX64 (fOpSizePfx=%d)\n", Ctx.ss, Ctx.rsp.u, CtxExpected.rsp.u, s_aTests[iTest].fOpSizePfx);
4602 }
4603 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4604 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4605 if (!fGp || fUd)
4606 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4607 else
4608 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4609 Ctx.rsp.u = uSavedRsp;
4610 g_usBs3TestStep++;
4611
4612 /* Again single stepping: */
4613 //Bs3TestPrintf("stepping...\n");
4614 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4615 Ctx.rflags.u16 |= X86_EFL_TF;
4616 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4617 CtxExpected.rax.u = Ctx.rax.u;
4618 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4619 if (fUd)
4620 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4621 else if (!fGp)
4622 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4623 else
4624 {
4625 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4626 bs3CpuBasic2_CheckDr6InitVal();
4627 }
4628 Ctx.rflags.u16 &= ~X86_EFL_TF;
4629 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4630 Ctx.rsp.u = uSavedRsp;
4631 g_usBs3TestStep++;
4632 }
4633 }
4634 else
4635 Bs3TestFailed("wtf?");
4636
4637 return 0;
4638}
4639
4640
4641/*********************************************************************************************************************************
4642* Near RET *
4643*********************************************************************************************************************************/
4644#define PROTO_ALL(a_Template) \
4645 FNBS3FAR a_Template ## _c16, \
4646 a_Template ## _c32, \
4647 a_Template ## _c64
4648PROTO_ALL(bs3CpuBasic2_retn_opsize_begin);
4649PROTO_ALL(bs3CpuBasic2_retn__ud2);
4650PROTO_ALL(bs3CpuBasic2_retn_opsize__ud2);
4651PROTO_ALL(bs3CpuBasic2_retn_i24__ud2);
4652PROTO_ALL(bs3CpuBasic2_retn_i24_opsize__ud2);
4653PROTO_ALL(bs3CpuBasic2_retn_i760__ud2);
4654PROTO_ALL(bs3CpuBasic2_retn_i5193__ud2);
4655PROTO_ALL(bs3CpuBasic2_retn_i5193_opsize__ud2);
4656PROTO_ALL(bs3CpuBasic2_retn_i0__ud2);
4657PROTO_ALL(bs3CpuBasic2_retn_i0_opsize__ud2);
4658FNBS3FAR bs3CpuBasic2_retn_rexw__ud2_c64;
4659FNBS3FAR bs3CpuBasic2_retn_i24_rexw__ud2_c64;
4660FNBS3FAR bs3CpuBasic2_retn_i5193_rexw__ud2_c64;
4661FNBS3FAR bs3CpuBasic2_retn_opsize_rexw__ud2_c64;
4662FNBS3FAR bs3CpuBasic2_retn_rexw_opsize__ud2_c64;
4663FNBS3FAR bs3CpuBasic2_retn_i24_opsize_rexw__ud2_c64;
4664FNBS3FAR bs3CpuBasic2_retn_i24_rexw_opsize__ud2_c64;
4665FNBS3FAR bs3CpuBasic2_retn_i5193_opsize_rexw__ud2_c64;
4666FNBS3FAR bs3CpuBasic2_retn_i5193_rexw_opsize__ud2_c64;
4667PROTO_ALL(bs3CpuBasic2_retn_opsize_end);
4668#undef PROTO_ALL
4669
4670
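/**
 * Plants the return address for the next near-return test at the top of the test
 * stack, using 2, 4 or 8 bytes according to cbAddr, and fills the surrounding
 * slots with 0xFFFFFFFF garbage.
 */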
4671static void bs3CpuBasic2_retn_PrepStack(BS3PTRUNION StkPtr, PCBS3REGCTX pCtxExpected, uint8_t cbAddr)
4672{
4673 StkPtr.pu32[3] = UINT32_MAX;
4674 StkPtr.pu32[2] = UINT32_MAX;
4675 StkPtr.pu32[1] = UINT32_MAX;
4676 StkPtr.pu32[0] = UINT32_MAX;
4677 StkPtr.pu32[-1] = UINT32_MAX;
4678 StkPtr.pu32[-2] = UINT32_MAX;
4679 StkPtr.pu32[-3] = UINT32_MAX;
4680 StkPtr.pu32[-4] = UINT32_MAX;
4681 if (cbAddr == 2)
4682 StkPtr.pu16[0] = pCtxExpected->rip.u16;
4683 else if (cbAddr == 4)
4684 StkPtr.pu32[0] = pCtxExpected->rip.u32;
4685 else
4686 StkPtr.pu64[0] = pCtxExpected->rip.u64;
4687}
4688
4689
4690/**
4691 * Entrypoint for NEAR RET tests.
4692 *
4693 * @returns 0 or BS3TESTDOMODE_SKIPPED.
4694 * @param bMode The CPU mode we're testing.
4695 */
4696BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_near_ret)(uint8_t bMode)
4697{
4698 BS3TRAPFRAME TrapCtx;
4699 BS3REGCTX Ctx;
4700 BS3REGCTX CtxExpected;
4701 unsigned iTest;
4702 BS3PTRUNION StkPtr;
4703 unsigned const cMaxRecompRuns = g_cBs3ThresholdNativeRecompiler ? g_cBs3ThresholdNativeRecompiler : 1;
4704 unsigned iRecompRun;
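/* Each test is repeated up to the native recompiler threshold, presumably so that
   both the initial and the natively recompiled code paths get exercised when
   running under the IEM recompiler (assumption based on the variable name). */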
4705
4706 /* make sure they're allocated */
4707 Bs3MemZero(&Ctx, sizeof(Ctx));
4708 Bs3MemZero(&CtxExpected, sizeof(Ctx));
4709 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
4710
4711 bs3CpuBasic2_SetGlobals(bMode);
4712
4713 /*
4714 * Create a context.
4715 *
4716 * ASSUMES we're on the ring-0 stack in ring-0 and using less than 16KB.
4717 */
4718 Bs3RegCtxSaveEx(&Ctx, bMode, 1664);
4719 Ctx.rsp.u = BS3_ADDR_STACK - _16K;
4720 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
4721
4722 StkPtr.pv = Bs3RegCtxGetRspSsAsCurPtr(&Ctx);
4723 //Bs3TestPrintf("Stack=%p rsp=%RX64\n", StkPtr.pv, Ctx.rsp.u);
4724
4725 /*
4726 * 16-bit tests.
4727 */
4728 if (BS3_MODE_IS_16BIT_CODE(bMode))
4729 {
4730 static struct
4731 {
4732 bool fOpSizePfx;
4733 uint16_t cbImm;
4734 FPFNBS3FAR pfnTest;
4735 }
4736 const s_aTests[] =
4737 {
4738 { false, 0, bs3CpuBasic2_retn__ud2_c16, },
4739 { true, 0, bs3CpuBasic2_retn_opsize__ud2_c16, },
4740 { false, 24, bs3CpuBasic2_retn_i24__ud2_c16, },
4741 { true, 24, bs3CpuBasic2_retn_i24_opsize__ud2_c16, },
4742 { false, 0, bs3CpuBasic2_retn_i0__ud2_c16, },
4743 { true, 0, bs3CpuBasic2_retn_i0_opsize__ud2_c16, },
4744 { false, 760, bs3CpuBasic2_retn_i760__ud2_c16, },
4745 { false, 5193, bs3CpuBasic2_retn_i5193__ud2_c16, },
4746 { true, 5193, bs3CpuBasic2_retn_i5193_opsize__ud2_c16, },
4747 };
4748
4749 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4750 {
4751 uint8_t const BS3_FAR *fpbCode;
4752
4753 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
4754 fpbCode = (uint8_t const BS3_FAR *)BS3_FP_MAKE(Ctx.cs, Ctx.rip.u16);
4755 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4756 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4757 CtxExpected.cs = Ctx.cs;
4758 if (!s_aTests[iTest].fOpSizePfx)
4759 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 2;
4760 else
4761 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 4;
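/* (retn pops the 2-byte return IP, or 4 bytes with the operand size prefix, and
   then adds the immediate operand to SP.) */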
4762 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4763 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64\n", Ctx.ss, Ctx.rsp.u);
4764 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
4765 {
4766 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 4 : 2);
4767 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4768 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4769 }
4770 g_usBs3TestStep++;
4771
4772 /* Again single stepping: */
4773 //Bs3TestPrintf("stepping...\n");
4774 Ctx.rflags.u16 |= X86_EFL_TF;
4775 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4776 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
4777 {
4778 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4779 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 4 : 2);
4780 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4781 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4782 }
4783 Ctx.rflags.u16 &= ~X86_EFL_TF;
4784 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4785 g_usBs3TestStep++;
4786 }
4787 }
4788 /*
4789 * 32-bit tests.
4790 */
4791 else if (BS3_MODE_IS_32BIT_CODE(bMode))
4792 {
4793 static struct
4794 {
4795 uint8_t cBits;
4796 bool fOpSizePfx;
4797 uint16_t cbImm;
4798 FPFNBS3FAR pfnTest;
4799 }
4800 const s_aTests[] =
4801 {
4802 { 32, false, 0, bs3CpuBasic2_retn__ud2_c32, },
4803 { 32, true, 0, bs3CpuBasic2_retn_opsize__ud2_c32, },
4804 { 32, false, 24, bs3CpuBasic2_retn_i24__ud2_c32, },
4805 { 32, true, 24, bs3CpuBasic2_retn_i24_opsize__ud2_c32, },
4806 { 32, false, 0, bs3CpuBasic2_retn_i0__ud2_c32, },
4807 { 32, true, 0, bs3CpuBasic2_retn_i0_opsize__ud2_c32, },
4808 { 32, false, 760, bs3CpuBasic2_retn_i760__ud2_c32, },
4809 { 32, false, 5193, bs3CpuBasic2_retn_i5193__ud2_c32, },
4810 { 32, true, 5193, bs3CpuBasic2_retn_i5193_opsize__ud2_c32, },
4811 };
4812
4813 /* Prepare a copy of the UD2 instructions in low memory for opsize prefixed tests. */
4814 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_begin_c32);
4815 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_end_c32) - offLow;
4816 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4817 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4818 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4819 Bs3TestFailedF("Opsize overridden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
4820 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4821 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4822 if (s_aTests[iTest].fOpSizePfx)
4823 {
4824 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4825 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4826 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4827 pbCode16[offUd] = 0xf1; /* replace original ud2 with icebp */
4828 pbCode16[offUd + 1] = 0xf1;
4829 pbLow[offUd] = 0x0f; /* plant ud2 in low memory */
4830 pbLow[offUd + 1] = 0x0b;
4831 }
4832
4833 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4834 {
4835 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4836
4837 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4838 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4839 CtxExpected.cs = Ctx.cs;
4840 if (!s_aTests[iTest].fOpSizePfx)
4841 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 4;
4842 else
4843 {
4844 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 2;
4845 CtxExpected.rip.u &= UINT16_MAX;
4846 }
4847 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4848 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4849 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64\n", Ctx.ss, Ctx.rsp.u);
4850 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
4851 {
4852 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 2 : 4);
4853 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4854 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4855 }
4856 g_usBs3TestStep++;
4857
4858 /* Again single stepping: */
4859 //Bs3TestPrintf("stepping...\n");
4860 Ctx.rflags.u16 |= X86_EFL_TF;
4861 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4862 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
4863 {
4864 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4865 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 2 : 4);
4866 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4867 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4868 }
4869 Ctx.rflags.u16 &= ~X86_EFL_TF;
4870 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4871 g_usBs3TestStep++;
4872 }
4873 }
4874 /*
4875 * 64-bit tests.
4876 */
4877 else if (BS3_MODE_IS_64BIT_CODE(bMode))
4878 {
4879 static struct
4880 {
4881 uint8_t cBits;
4882 bool fOpSizePfx;
4883 uint16_t cbImm;
4884 FPFNBS3FAR pfnTest;
4885 }
4886 const s_aTests[] =
4887 {
4888 { 32, false, 0, bs3CpuBasic2_retn__ud2_c64, },
4889 { 32, false, 0, bs3CpuBasic2_retn_rexw__ud2_c64, },
4890 { 32, true, 0, bs3CpuBasic2_retn_opsize__ud2_c64, },
4891 { 32, false, 0, bs3CpuBasic2_retn_opsize_rexw__ud2_c64, },
4892 { 32, true, 0, bs3CpuBasic2_retn_rexw_opsize__ud2_c64, },
4893 { 32, false, 24, bs3CpuBasic2_retn_i24__ud2_c64, },
4894 { 32, false, 24, bs3CpuBasic2_retn_i24_rexw__ud2_c64, },
4895 { 32, true, 24, bs3CpuBasic2_retn_i24_opsize__ud2_c64, },
4896 { 32, false, 24, bs3CpuBasic2_retn_i24_opsize_rexw__ud2_c64, },
4897 { 32, true, 24, bs3CpuBasic2_retn_i24_rexw_opsize__ud2_c64, },
4898 { 32, false, 0, bs3CpuBasic2_retn_i0__ud2_c64, },
4899 { 32, true, 0, bs3CpuBasic2_retn_i0_opsize__ud2_c64, },
4900 { 32, false, 760, bs3CpuBasic2_retn_i760__ud2_c64, },
4901 { 32, false, 5193, bs3CpuBasic2_retn_i5193__ud2_c64, },
4902 { 32, false, 5193, bs3CpuBasic2_retn_i5193_rexw__ud2_c64, },
4903 { 32, true, 5193, bs3CpuBasic2_retn_i5193_opsize__ud2_c64, },
4904 { 32, false, 5193, bs3CpuBasic2_retn_i5193_opsize_rexw__ud2_c64, },
4905 { 32, true, 5193, bs3CpuBasic2_retn_i5193_rexw_opsize__ud2_c64, },
4906 };
4907 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
4908 bool const fFix64OpSize = enmCpuVendor == BS3CPUVENDOR_INTEL; /** @todo what does VIA do? */
4909
4910 /* Prepare a copy of the UD2 instructions in low memory for the opsize prefixed
4911 tests, unless we're on Intel where the opsize prefix is ignored. In the Intel
4912 case we just fill low memory with int3's so we can detect non-Intel behaviour. */
4913 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_begin_c64);
4914 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_end_c64) - offLow;
4915 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4916 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4917 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4918 Bs3TestFailedF("Opsize overridden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
4919 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4920 if (!fFix64OpSize)
4921 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4922 if (s_aTests[iTest].fOpSizePfx)
4923 {
4924 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4925 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4926 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4927 pbCode16[offUd] = 0xf1; /* replace original ud2 with icebp */
4928 pbCode16[offUd + 1] = 0xf1;
4929 pbLow[offUd] = 0x0f; /* plant ud2 in low memory */
4930 pbLow[offUd + 1] = 0x0b;
4931 }
4932
4933 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4934 {
4935 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4936
4937 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4938 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4939 CtxExpected.cs = Ctx.cs;
4940 if (!s_aTests[iTest].fOpSizePfx || fFix64OpSize)
4941 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 8;
4942 else
4943 {
4944 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 2;
4945 CtxExpected.rip.u &= UINT16_MAX;
4946 }
4947 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4948 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4949 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64\n", Ctx.ss, Ctx.rsp.u);
4950 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
4951 {
4952 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx && !fFix64OpSize ? 2 : 8);
4953 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4954 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4955 }
4956 g_usBs3TestStep++;
4957
4958 /* Again single stepping: */
4959 //Bs3TestPrintf("stepping...\n");
4960 Ctx.rflags.u16 |= X86_EFL_TF;
4961 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4962 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
4963 {
4964 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4965 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx && !fFix64OpSize ? 2 : 8);
4966 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4967 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4968 }
4969 Ctx.rflags.u16 &= ~X86_EFL_TF;
4970 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4971 g_usBs3TestStep++;
4972 }
4973 }
4974 else
4975 Bs3TestFailed("wtf?");
4976
4977 return 0;
4978}
4979
4980
4981/*********************************************************************************************************************************
4982* Far RET *
4983*********************************************************************************************************************************/
4984#define PROTO_ALL(a_Template) \
4985 FNBS3FAR a_Template ## _c16, \
4986 a_Template ## _c32, \
4987 a_Template ## _c64
4988PROTO_ALL(bs3CpuBasic2_retf);
4989PROTO_ALL(bs3CpuBasic2_retf_opsize);
4990FNBS3FAR bs3CpuBasic2_retf_rexw_c64;
4991FNBS3FAR bs3CpuBasic2_retf_rexw_opsize_c64;
4992FNBS3FAR bs3CpuBasic2_retf_opsize_rexw_c64;
4993PROTO_ALL(bs3CpuBasic2_retf_i32);
4994PROTO_ALL(bs3CpuBasic2_retf_i32_opsize);
4995FNBS3FAR bs3CpuBasic2_retf_i24_rexw_c64;
4996FNBS3FAR bs3CpuBasic2_retf_i24_rexw_opsize_c64;
4997FNBS3FAR bs3CpuBasic2_retf_i24_opsize_rexw_c64;
4998PROTO_ALL(bs3CpuBasic2_retf_i888);
4999#undef PROTO_ALL
5000
5001
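/**
 * Plants a far return frame on the test stack: the return IP/EIP/RIP and CS in
 * cbStkItem sized slots, optionally followed, after cbImm bytes of immediate
 * operand, by the outer SP/ESP/RSP and SS pair used by inter-privilege returns.
 * The surrounding area is filled with 0xFF garbage first.
 */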
5002static void bs3CpuBasic2_retf_PrepStack(BS3PTRUNION StkPtr, uint8_t cbStkItem, RTSEL uRetCs, uint64_t uRetRip,
5003 bool fWithStack, uint16_t cbImm, RTSEL uRetSs, uint64_t uRetRsp)
5004{
5005 Bs3MemSet(&StkPtr.pu32[-4], 0xff, 96);
5006 if (cbStkItem == 2)
5007 {
5008 StkPtr.pu16[0] = (uint16_t)uRetRip;
5009 StkPtr.pu16[1] = uRetCs;
5010 if (fWithStack)
5011 {
5012 StkPtr.pb += cbImm;
5013 StkPtr.pu16[2] = (uint16_t)uRetRsp;
5014 StkPtr.pu16[3] = uRetSs;
5015 }
5016 }
5017 else if (cbStkItem == 4)
5018 {
5019 StkPtr.pu32[0] = (uint32_t)uRetRip;
5020 StkPtr.pu16[2] = uRetCs;
5021 if (fWithStack)
5022 {
5023 StkPtr.pb += cbImm;
5024 StkPtr.pu32[2] = (uint32_t)uRetRsp;
5025 StkPtr.pu16[6] = uRetSs;
5026 }
5027 }
5028 else
5029 {
5030 StkPtr.pu64[0] = uRetRip;
5031 StkPtr.pu16[4] = uRetCs;
5032 if (fWithStack)
5033 {
5034 StkPtr.pb += cbImm;
5035 StkPtr.pu64[2] = uRetRsp;
5036 StkPtr.pu16[12] = uRetSs;
5037 }
5038 }
5039}
5040
5041
5042/**
5043 * Entrypoint for FAR RET tests.
5044 *
5045 * @returns 0 or BS3TESTDOMODE_SKIPPED.
5046 * @param bMode The CPU mode we're testing.
5047 */
5048BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_far_ret)(uint8_t bMode)
5049{
5050 BS3TRAPFRAME TrapCtx;
5051 BS3REGCTX Ctx;
5052 BS3REGCTX Ctx2;
5053 BS3REGCTX CtxExpected;
5054 unsigned iTest;
5055 unsigned iSubTest;
5056 BS3PTRUNION StkPtr;
5057
5058#define LOW_UD_ADDR 0x0609
5059 uint8_t BS3_FAR * const pbLowUd = BS3_FP_MAKE(BS3_FP_SEG(&StkPtr), LOW_UD_ADDR);
5060#define LOW_SALC_UD_ADDR 0x0611
5061 uint8_t BS3_FAR * const pbLowSalcUd = BS3_FP_MAKE(BS3_FP_SEG(&StkPtr), LOW_SALC_UD_ADDR);
5062#define LOW_SWAPGS_ADDR 0x061d
5063 uint8_t BS3_FAR * const pbLowSwapGs = BS3_FP_MAKE(BS3_FP_SEG(&StkPtr), LOW_SWAPGS_ADDR);
5064#define BS3TEXT16_ADDR_HI (BS3_ADDR_BS3TEXT16 >> 16)
5065
5066 /* make sure they're allocated */
5067 Bs3MemZero(&Ctx, sizeof(Ctx));
5068 Bs3MemZero(&Ctx2, sizeof(Ctx2));
5069 Bs3MemZero(&CtxExpected, sizeof(CtxExpected));
5070 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
5071
5072 bs3CpuBasic2_SetGlobals(bMode);
5073
5074 //if (!BS3_MODE_IS_64BIT_SYS(bMode) && bMode != BS3_MODE_PP32_16) return 0xff;
5075 //if (bMode != BS3_MODE_PE32_16) return 0xff;
5076
5077 /*
5078 * When doing a retf with a 16-bit effective operand size to 32-bit or 64-bit
5079 * code, we're restricted to a 16-bit address. So, we plant a UD
5080 * instruction below 64KB that we can target with flat 32/64 code segments.
5081 * (Putting it on the stack would be possible too, but we'd have to create
5082 * the sub-test tables dynamically, which isn't necessary.)
5083 */
5084 Bs3MemSet(&pbLowUd[-9], 0xcc, 32);
5085 Bs3MemSet(&pbLowSalcUd[-9], 0xcc, 32);
5086 Bs3MemSet(&pbLowSwapGs[-9], 0xcc, 32);
5087
5088 pbLowUd[0] = 0x0f; /* ud2 */
5089 pbLowUd[1] = 0x0b;
5090
5091 /* A variation to detect whether we're in 64-bit or 16-bit mode when
5092 executing the code. */
5093 pbLowSalcUd[0] = 0xd6; /* salc */
5094 pbLowSalcUd[1] = 0x0f; /* ud2 */
5095 pbLowSalcUd[2] = 0x0b;
5096
5097 /* A variation to check that we're not in 64-bit mode. */
5098 pbLowSwapGs[0] = 0x0f; /* swapgs */
5099 pbLowSwapGs[1] = 0x01;
5100 pbLowSwapGs[2] = 0xf8;
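/* (swapgs is only valid in 64-bit mode; anywhere else it raises #UD, which is
   what makes it usable as a "not in 64-bit mode" probe.) */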
5101
5102 /*
5103 * Use separate stacks for all relevant CPU exceptions so we can put
5104 * garbage in unused RSP bits w/o needing to care about where a long mode
5105 * handler will end up when accessing the whole RSP. (Not an issue with
5106 * 16-bit and 32-bit protected mode kernels, as here the weird SS based
5107 * stack pointer handling is in effect and the exception handler code
5108 * will just continue using the same SS and same portion of RSP.)
5109 *
5110 * See r154660.
5111 */
5112 if (BS3_MODE_IS_64BIT_SYS(bMode))
5113 Bs3Trap64InitEx(true);
5114
5115 /*
5116 * Create some call gates and whatnot for the UD2 code using the spare selectors.
5117 */
5118 if (BS3_MODE_IS_64BIT_SYS(bMode))
5119 for (iTest = 0; iTest < 16; iTest++)
5120 Bs3SelSetupGate64(&Bs3GdteSpare00 + iTest * 2, iTest /*bType*/, 3 /*bDpl*/,
5121 BS3_SEL_R0_CS64, BS3_FP_OFF(bs3CpuBasic2_ud2) + BS3_ADDR_BS3TEXT16);
5122 else
5123 {
5124 for (iTest = 0; iTest < 16; iTest++)
5125 {
5126 Bs3SelSetupGate(&Bs3GdteSpare00 + iTest, iTest /*bType*/, 3 /*bDpl*/,
5127 BS3_SEL_R0_CS16, BS3_FP_OFF(bs3CpuBasic2_ud2), 0);
5128 Bs3SelSetupGate(&Bs3GdteSpare00 + iTest + 16, iTest /*bType*/, 3 /*bDpl*/,
5129 BS3_SEL_R0_CS32, BS3_FP_OFF(bs3CpuBasic2_ud2) + BS3_ADDR_BS3TEXT16, 0);
5130 }
5131 }
5132
5133 /*
5134 * Create a context.
5135 *
5136 * ASSUMES we're on the ring-0 stack in ring-0 and using less than 16KB.
5137 */
5138 Bs3RegCtxSaveEx(&Ctx, bMode, 1728);
5139 Ctx.rsp.u = BS3_ADDR_STACK - _16K;
5140 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
5141
5142 StkPtr.pv = Bs3RegCtxGetRspSsAsCurPtr(&Ctx);
5143 //Bs3TestPrintf("Stack=%p rsp=%RX64\n", StkPtr.pv, Ctx.rsp.u);
5144
5145 /*
5146 * 16-bit tests.
5147 */
5148 if (BS3_MODE_IS_16BIT_CODE(bMode))
5149 {
5150 static struct
5151 {
5152 bool fOpSizePfx;
5153 uint16_t cbImm;
5154 FPFNBS3FAR pfnTest;
5155 } const s_aTests[] =
5156 {
5157 { false, 0, bs3CpuBasic2_retf_c16, },
5158 { true, 0, bs3CpuBasic2_retf_opsize_c16, },
5159 { false, 32, bs3CpuBasic2_retf_i32_c16, },
5160 { true, 32, bs3CpuBasic2_retf_i32_opsize_c16, },
5161 { false,888, bs3CpuBasic2_retf_i888_c16, },
5162 };
5163
5164 static struct
5165 {
5166 bool fRmOrV86;
5167 bool fInterPriv;
5168 int8_t iXcpt;
5169 RTSEL uStartSs;
5170 uint8_t cDstBits;
5171 RTSEL uDstCs;
5172 union /* must use a union here: a plain uint16_t won't compile and a uint32_t messes up the fixups. */
5173 {
5174 uint32_t offDst;
5175 struct
5176 {
5177 NPVOID pv;
5178 uint16_t uHigh;
5179 } s;
5180 };
5181 RTSEL uDstSs;
5182 uint16_t uErrCd;
5183 } const s_aSubTests[] =
5184 { /* rm/v86, PriChg, Xcpt, uStartSs, => bits uDstCs offDst/pv uDstSs uErrCd */
5185 { true, false, -1, 0, 16, BS3_SEL_TEXT16, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, 0, 0 },
5186 { false, false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_TEXT16 | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, 0 },
5187 { false, false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, 0 },
5188 { false, false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, 0 },
5189 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, 0 },
5190 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, 0 },
5191 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS32 | 1, 0 },
5192 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS32 | 1, 0 },
5193 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5194 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5195 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS32 | 2, 0 },
5196 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS32 | 2, 0 },
5197 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5198 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5199 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS32 | 3, 0 },
5200 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS32 | 3, 0 },
5201 /* conforming stuff */
5202 { false, false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, 0 },
5203 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, 0 },
5204 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 1, BS3_SEL_R0_SS16 },
5205 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5206 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5207 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R1_CS16_CNF },
5208 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R1_CS16_CNF },
5209 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, 0 },
5210 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5211 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5212 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS16_CNF },
5213 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS16_CNF },
5214 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS16_CNF },
5215 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS16_CNF },
5216 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5217 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5218 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS16_CNF },
5219 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS16_CNF },
5220 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS16_CNF },
5221 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS16_CNF },
5222 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS16_CNF },
5223 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 2, BS3_SEL_R3_CS16_CNF },
5224 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5225 /* returning to 32-bit code: */
5226 { false, false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5227 { false, false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS16 | 0, 0 },
5228 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5229 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
5230 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5231 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
5232 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5233 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
5234 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5235 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
5236 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5237 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
5238 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5239 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
5240 { false, false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5241 { false, false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
5242 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5243 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
5244 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5245 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
5246 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5247 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
5248 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5249 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
5250 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5251 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
5252 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5253 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
5254 /* returning to 32-bit conforming code: */
5255 { false, false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5256 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5257 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 1, BS3_SEL_R0_SS16 },
5258 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R0_SS16 },
5259 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 1, BS3_SEL_R3_SS16 },
5260 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, BS3_SEL_R3_SS16 },
5261 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5262 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5263 { false, false, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R1_CS32_CNF },
5264 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5265 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 1, BS3_SEL_R0_SS16 },
5266 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R0_SS16 },
5267 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 1, BS3_SEL_R3_SS16 },
5268 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, BS3_SEL_R3_SS16 },
5269 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5270 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5271 { false, false, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS32_CNF },
5272 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS32_CNF },
5273 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5274 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5275 { false, false, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS32_CNF },
5276 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS32_CNF },
5277 { false, true, 42, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS32_CNF },
5278 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5279 /* returning to 64-bit code or 16-bit when not in long mode: */
5280 { false, false, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5281 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5282 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5283 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5284 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_DS64 | 1, BS3_SEL_R0_DS64 },
5285 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_DS64 | 1, 0 },
5286 { false, false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5287 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5288 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5289 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5290 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5291 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5292 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5293 { false, true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R2_CS64 },
5294 { false, true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 3, BS3_SEL_R2_CS64 },
5295 { false, true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 3, BS3_SEL_R1_SS32 },
5296 { false, true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 2, BS3_SEL_R3_SS32 },
5297 /* returning to 64-bit code or 16-bit when not in long mode, conforming code variant: */
5298 { false, false, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5299 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5300 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5301 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5302
5303 { false, false, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R1_CS64_CNF },
5304 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5305 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 2, BS3_SEL_R1_SS16 },
5306 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 1, BS3_SEL_R2_SS16 },
5307 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R2_SS16 },
5308 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5309 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5310
5311 { false, false, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS64_CNF },
5312 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS64_CNF },
5313 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5314 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5315
5316 { false, false, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS64_CNF },
5317 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS64_CNF },
5318 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS64_CNF },
5319 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5320
5321 /* some additional #GP variations */ /** @todo test all possible exceptions! */
5322 { false, true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS16 },
5323 { false, true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_TSS32_DF | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_TSS32_DF },
5324 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_00 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_00 },
5325 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_01 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_01 },
5326 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_02 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_02 },
5327 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_03 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_03 },
5328 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_04 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_04 },
5329 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_05 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_05 },
5330 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_06 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_06 },
5331 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_07 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_07 },
5332 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_08 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_08 },
5333 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_09 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_09 },
5334 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0a | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0a },
5335 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0b | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0b },
5336 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0c | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0c },
5337 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0d | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0d },
5338 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0e | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0e },
5339 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0f | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0f },
5340 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_10 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_10 },
5341 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_11 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_11 },
5342 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_12 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_12 },
5343 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_13 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_13 },
5344 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_14 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_14 },
5345 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_15 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_15 },
5346 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_16 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_16 },
5347 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_17 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_17 },
5348 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_18 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_18 },
5349 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_19 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_19 },
5350 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_1a | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1a },
5351 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_1b | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1b },
5352 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_1c | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1c },
5353 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_1d | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1d },
5354 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_1e | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1e },
5355 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_1f | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1f },
5356 };
5357
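    /* Each row above gives the starting SS, the cs:ip (and, for inter-privilege
       returns, the ss:sp) to place on the stack, and the outcome: iXcpt < 0 means
       the return is expected to land on the UD2 (or SALC+UD2) probe, otherwise a
       #GP with the given error code is expected.  The loops below drive every
       retf encoding in s_aTests against every row. */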
5358 bool const fRmOrV86 = BS3_MODE_IS_RM_OR_V86(bMode);
5359 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
5360
5361 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5362 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
5363 {
5364 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
5365
5366 for (iSubTest = 0; iSubTest < RT_ELEMENTS(s_aSubTests); iSubTest++)
5367 {
5368 g_usBs3TestStep = (iTest << 12) | (iSubTest << 4);
5369 if ( s_aSubTests[iSubTest].fRmOrV86 == fRmOrV86
5370 && (s_aSubTests[iSubTest].offDst <= UINT16_MAX || s_aTests[iTest].fOpSizePfx))
5371 {
5372 uint16_t const cbFrmDisp = s_aSubTests[iSubTest].fInterPriv ? iSubTest % 7 : 0;
5373 uint16_t const cbStkItem = s_aTests[iTest].fOpSizePfx ? 4 : 2;
5374 uint16_t const cbFrame = (s_aSubTests[iSubTest].fInterPriv ? 4 : 2) * cbStkItem;
5375 uint32_t const uFlatDst = Bs3SelFar32ToFlat32(s_aSubTests[iSubTest].offDst, s_aSubTests[iSubTest].uDstCs)
5376 + (s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode));
5377 RTSEL const uDstSs = s_aSubTests[iSubTest].uDstSs;
5378 uint64_t uDstRspExpect, uDstRspPush;
5379 uint16_t cErrors;
5380
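                    /* Figure out the SS:RSP the retf should leave behind: the frame is cs:ip
                       (+ ss:sp for inter-privilege returns) in 2 or 4 byte items depending on
                       the operand size prefix, plus any immediate and frame displacement.  With
                       a 32-bit operand size the ESP value we push carries 0xacdc in the high
                       word so we can tell whether the CPU loads all 32 bits or only the low 16. */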
5381 Ctx.ss = s_aSubTests[iSubTest].uStartSs;
5382 if (Ctx.ss != BS3_SEL_R0_SS32)
5383 Ctx.rsp.u32 |= UINT32_C(0xfffe0000);
5384 else
5385 Ctx.rsp.u32 &= UINT16_MAX;
5386 uDstRspExpect = uDstRspPush = Ctx.rsp.u + s_aTests[iTest].cbImm + cbFrame + cbFrmDisp;
5387 if (s_aSubTests[iSubTest].fInterPriv)
5388 {
5389 if (s_aTests[iTest].fOpSizePfx)
5390 uDstRspPush = (uDstRspPush & UINT16_MAX) | UINT32_C(0xacdc0000);
5391 if ( uDstSs == (BS3_SEL_R1_SS32 | 1)
5392 || uDstSs == (BS3_SEL_R2_SS32 | 2)
5393 || uDstSs == (BS3_SEL_R3_SS32 | 3)
5394 || (s_aSubTests[iSubTest].cDstBits == 64 && BS3_MODE_IS_64BIT_SYS(bMode)))
5395 {
5396 if (s_aTests[iTest].fOpSizePfx)
5397 uDstRspExpect = uDstRspPush;
5398 else
5399 uDstRspExpect &= UINT16_MAX;
5400 }
5401 }
5402
5403 CtxExpected.bCpl = Ctx.bCpl;
5404 CtxExpected.cs = Ctx.cs;
5405 CtxExpected.ss = Ctx.ss;
5406 CtxExpected.ds = Ctx.ds;
5407 CtxExpected.es = Ctx.es;
5408 CtxExpected.fs = Ctx.fs;
5409 CtxExpected.gs = Ctx.gs;
5410 CtxExpected.rip.u = Ctx.rip.u;
5411 CtxExpected.rsp.u = Ctx.rsp.u;
5412 CtxExpected.rax.u = Ctx.rax.u;
5413 if (s_aSubTests[iSubTest].iXcpt < 0)
5414 {
5415 CtxExpected.cs = s_aSubTests[iSubTest].uDstCs;
5416 CtxExpected.rip.u = s_aSubTests[iSubTest].offDst;
5417 if (s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
5418 {
5419 CtxExpected.rip.u += 1;
5420 CtxExpected.rax.au8[0] = CtxExpected.rflags.u16 & X86_EFL_CF ? 0xff : 0;
5421 }
5422 CtxExpected.ss = uDstSs;
5423 CtxExpected.rsp.u = uDstRspExpect;
5424 if (s_aSubTests[iSubTest].fInterPriv)
5425 {
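                        /* On a return to a less privileged level the CPU also clears any of
                           DS/ES/FS/GS that is not usable at the new CPL (lower-DPL data or
                           non-conforming code descriptors); mirror that in the expected context. */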
5426 uint16_t BS3_FAR *puSel = &CtxExpected.ds; /* ASSUME member order! */
5427 unsigned cSels = 4;
5428 CtxExpected.bCpl = CtxExpected.ss & X86_SEL_RPL;
5429 while (cSels-- > 0)
5430 {
5431 uint16_t uSel = *puSel;
5432 if ( (uSel & X86_SEL_MASK_OFF_RPL)
5433 && Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u2Dpl < CtxExpected.bCpl
5434 && (Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u4Type & (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
5435 != (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
5436 *puSel = 0;
5437 puSel++;
5438 }
5439 CtxExpected.rsp.u += s_aTests[iTest].cbImm; /* arguments are dropped from both stacks. */
5440 }
5441 }
5442 g_uBs3TrapEipHint = CtxExpected.rip.u32;
5443 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
5444 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64 -> %04RX16:%04RX64 [pushed %#RX64]\n", Ctx.ss, Ctx.rsp.u, CtxExpected.ss, CtxExpected.rsp.u, uDstRspPush);
5445 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5446 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5447 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5448 //Bs3TestPrintf("%p: %04RX16 %04RX16 %04RX16 %04RX16\n", StkPtr.pu16, StkPtr.pu16[0], StkPtr.pu16[1], StkPtr.pu16[2], StkPtr.pu16[3]);
5449 //Bs3TestPrintf("%.48Rhxd\n", StkPtr.pu16);
5450 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5451 if (s_aSubTests[iSubTest].iXcpt < 0)
5452 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5453 else
5454 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5455 g_usBs3TestStep++; /* 1 */
5456
5457 /* Bad hw bp: Setup DR0-3 but use invalid length encodings (non-byte) */
5458 //Bs3TestPrintf("hw bp: bad len\n");
5459 Bs3RegSetDr0(uFlatDst);
5460 Bs3RegSetDr1(uFlatDst);
5461 Bs3RegSetDr2(uFlatDst);
5462 Bs3RegSetDr3(uFlatDst);
5463 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5464 Bs3RegSetDr7(X86_DR7_INIT_VAL
5465 | X86_DR7_RW(0, X86_DR7_RW_EO) | X86_DR7_LEN(1, X86_DR7_LEN_WORD) | X86_DR7_L_G(1)
5466 | X86_DR7_RW(2, X86_DR7_RW_EO) | X86_DR7_LEN(2, X86_DR7_LEN_DWORD) | X86_DR7_L_G(2)
5467 | ( BS3_MODE_IS_64BIT_SYS(bMode)
5468 ? X86_DR7_RW(3, X86_DR7_RW_EO) | X86_DR7_LEN(3, X86_DR7_LEN_QWORD) | X86_DR7_L_G(3) : 0) );
5469 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5470 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5471 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5472 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5473 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5474 if (s_aSubTests[iSubTest].iXcpt < 0)
5475 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5476 else
5477 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5478 bs3CpuBasic2_CheckDr6InitVal();
5479 g_usBs3TestStep++; /* 2 */
5480
5481 /* Bad hw bp: setup DR0-3 but don't enable them */
5482 //Bs3TestPrintf("hw bp: disabled\n");
5483 //Bs3RegSetDr0(uFlatDst);
5484 //Bs3RegSetDr1(uFlatDst);
5485 //Bs3RegSetDr2(uFlatDst);
5486 //Bs3RegSetDr3(uFlatDst);
5487 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5488 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5489 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5490 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5491 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5492 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5493 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5494 if (s_aSubTests[iSubTest].iXcpt < 0)
5495 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5496 else
5497 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5498 bs3CpuBasic2_CheckDr6InitVal();
5499 g_usBs3TestStep++; /* 3 */
5500
5501                 /* Bad hw bp: Points at the 2nd byte of the UD2. The docs say it only works when pointing at the first byte. */
5502 //Bs3TestPrintf("hw bp: byte 2\n");
5503 Bs3RegSetDr0(uFlatDst + 1);
5504 Bs3RegSetDr1(uFlatDst + 1);
5505 //Bs3RegSetDr2(uFlatDst);
5506 //Bs3RegSetDr3(uFlatDst);
5507 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5508 Bs3RegSetDr7(X86_DR7_INIT_VAL
5509 | X86_DR7_RW(0, X86_DR7_RW_EO) | X86_DR7_LEN(0, X86_DR7_LEN_BYTE) | X86_DR7_L_G(0)
5510 | X86_DR7_RW(1, X86_DR7_RW_EO) | X86_DR7_LEN(1, X86_DR7_LEN_BYTE) | X86_DR7_L_G(1));
5511 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5512 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5513 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5514 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5515 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5516 if (s_aSubTests[iSubTest].iXcpt < 0)
5517 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5518 else
5519 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5520 bs3CpuBasic2_CheckDr6InitVal();
5521 g_usBs3TestStep++; /* 4 */
5522
5523                 /* Again with two correctly configured hardware breakpoints and a disabled one that just matches the address: */
5524 //Bs3TestPrintf("bp 1 + 3...\n");
5525 Bs3RegSetDr0(uFlatDst);
5526 Bs3RegSetDr1(uFlatDst);
5527 Bs3RegSetDr2(0);
5528 Bs3RegSetDr3(uFlatDst);
5529 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5530 Bs3RegSetDr7(X86_DR7_INIT_VAL
5531 | X86_DR7_RW(1, X86_DR7_RW_EO) | X86_DR7_LEN(1, X86_DR7_LEN_BYTE) | X86_DR7_L_G(1)
5532 | X86_DR7_RW(3, X86_DR7_RW_EO) | X86_DR7_LEN(3, X86_DR7_LEN_BYTE) | X86_DR7_L_G(3) );
5533 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5534 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5535 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5536 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5537 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5538 if (s_aSubTests[iSubTest].iXcpt < 0)
5539 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected,
5540 enmCpuVendor == BS3CPUVENDOR_AMD ? X86_DR6_B1 | X86_DR6_B3 /* 3990x */
5541 : X86_DR6_B0 | X86_DR6_B1 | X86_DR6_B3);
5542 else
5543 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5544 g_usBs3TestStep++; /* 5 */
5545
5546 /* Again with a single locally enabled breakpoint. */
5547 //Bs3TestPrintf("bp 0/l...\n");
5548 Bs3RegSetDr0(uFlatDst);
5549 Bs3RegSetDr1(0);
5550 Bs3RegSetDr2(0);
5551 Bs3RegSetDr3(0);
5552 Bs3RegSetDr6(X86_DR6_INIT_VAL | X86_DR6_B1 | X86_DR6_B2 | X86_DR6_B3 | X86_DR6_BS);
5553 Bs3RegSetDr7(X86_DR7_INIT_VAL
5554 | X86_DR7_RW(0, X86_DR7_RW_EO) | X86_DR7_LEN(0, X86_DR7_LEN_BYTE) | X86_DR7_L(0));
5555 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5556 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5557 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5558 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5559 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5560 if (s_aSubTests[iSubTest].iXcpt < 0)
5561                     bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_B0 | X86_DR6_BS); /* B0 set, B1-B3 cleared, BS preserved */
5562 else
5563 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5564 g_usBs3TestStep++; /* 6 */
5565
5566                 /* Again with a single globally enabled breakpoint and several other types of breakpoints
5567 configured but not enabled. */
5568 //Bs3TestPrintf("bp 2/g+...\n");
5569 cErrors = Bs3TestSubErrorCount();
5570 Bs3RegSetDr0(uFlatDst);
5571 Bs3RegSetDr1(uFlatDst);
5572 Bs3RegSetDr2(uFlatDst);
5573 Bs3RegSetDr3(uFlatDst);
5574 Bs3RegSetDr6(X86_DR6_INIT_VAL | X86_DR6_BS | X86_DR6_BD | X86_DR6_BT | X86_DR6_B2);
5575 Bs3RegSetDr7(X86_DR7_INIT_VAL
5576 | X86_DR7_RW(0, X86_DR7_RW_RW) | X86_DR7_LEN(0, X86_DR7_LEN_BYTE)
5577 | X86_DR7_RW(1, X86_DR7_RW_RW) | X86_DR7_LEN(1, X86_DR7_LEN_BYTE) | X86_DR7_L_G(1)
5578 | X86_DR7_RW(2, X86_DR7_RW_EO) | X86_DR7_LEN(2, X86_DR7_LEN_BYTE) | X86_DR7_G(2)
5579 | X86_DR7_RW(3, X86_DR7_RW_WO) | X86_DR7_LEN(3, X86_DR7_LEN_BYTE) | X86_DR7_G(3)
5580 );
5581 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5582 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5583 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5584 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5585 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5586 if (s_aSubTests[iSubTest].iXcpt < 0)
5587 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_B2 | X86_DR6_BS | X86_DR6_BD | X86_DR6_BT);
5588 else
5589 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5590 g_usBs3TestStep++; /* 7 */
5591
5592 /* Now resume it with lots of execution breakpoints configured. */
5593 if (s_aSubTests[iSubTest].iXcpt < 0 && Bs3TestSubErrorCount() == cErrors)
5594 {
5595 Bs3MemCpy(&Ctx2, &TrapCtx.Ctx, sizeof(Ctx2));
5596 Ctx2.rflags.u32 |= X86_EFL_RF;
5597 //Bs3TestPrintf("bp 3/g+rf %04RX16:%04RX64 efl=%RX32 ds=%04RX16...\n", Ctx2.cs, Ctx2.rip.u, Ctx2.rflags.u32, Ctx2.ds);
5598 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5599 Bs3RegSetDr7(X86_DR7_INIT_VAL
5600 | X86_DR7_RW(0, X86_DR7_RW_EO) | X86_DR7_LEN(0, X86_DR7_LEN_BYTE)
5601 | X86_DR7_RW(1, X86_DR7_RW_EO) | X86_DR7_LEN(1, X86_DR7_LEN_BYTE) | X86_DR7_L_G(1)
5602 | X86_DR7_RW(2, X86_DR7_RW_EO) | X86_DR7_LEN(2, X86_DR7_LEN_BYTE) | X86_DR7_G(2)
5603 | X86_DR7_RW(3, X86_DR7_RW_EO) | X86_DR7_LEN(3, X86_DR7_LEN_BYTE) | X86_DR7_G(3)
5604 );
5605 Bs3TrapSetJmpAndRestore(&Ctx2, &TrapCtx);
5606 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5607 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5608 bs3CpuBasic2_CheckDr6InitVal();
5609 }
5610 g_usBs3TestStep++; /* 8 */
5611
5612 /* Now do single stepping: */
5613 //Bs3TestPrintf("stepping...\n");
5614 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5615 Ctx.rflags.u16 |= X86_EFL_TF;
5616 CtxExpected.rflags.u16 = Ctx.rflags.u16;
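                    /* When single stepping, the #DB is raised on the first instruction at the
                       destination, i.e. before the SALC has executed, so undo the SALC related
                       adjustments made to the expected context above. */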
5617 if (s_aSubTests[iSubTest].iXcpt < 0 && s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
5618 {
5619 CtxExpected.rip.u -= 1;
5620 CtxExpected.rax.u = Ctx.rax.u;
5621 }
5622 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5623 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5624 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5625 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5626 if (s_aSubTests[iSubTest].iXcpt < 0)
5627 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
5628 else
5629 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5630 Ctx.rflags.u16 &= ~X86_EFL_TF;
5631 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5632 g_usBs3TestStep++; /* 9 */
5633
5634 /* Single step with B0-B3 set to check that they're not preserved
5635 and with BD & BT to check that they are (checked on Intel 6700K): */
5636 //Bs3TestPrintf("stepping b0-b3+bd+bt=1...\n");
5637 Bs3RegSetDr6(X86_DR6_INIT_VAL | X86_DR6_B_MASK | X86_DR6_BD | X86_DR6_BT);
5638 Ctx.rflags.u16 |= X86_EFL_TF;
5639 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5640 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5641 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5642 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5643 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5644 if (s_aSubTests[iSubTest].iXcpt < 0)
5645 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS | X86_DR6_BD | X86_DR6_BT);
5646 else
5647 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5648 Ctx.rflags.u16 &= ~X86_EFL_TF;
5649 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5650 g_usBs3TestStep++; /* 10 */
5651
5652 }
5653 }
5654 }
5655 }
5656 /*
5657 * 32-bit tests.
5658 */
5659 else if (BS3_MODE_IS_32BIT_CODE(bMode))
5660 {
5661 static struct
5662 {
5663 bool fOpSizePfx;
5664 uint16_t cbImm;
5665 FPFNBS3FAR pfnTest;
5666 } const s_aTests[] =
5667 {
5668 { false, 0, bs3CpuBasic2_retf_c32, },
5669 { true, 0, bs3CpuBasic2_retf_opsize_c32, },
5670 { false, 32, bs3CpuBasic2_retf_i32_c32, },
5671 { true, 32, bs3CpuBasic2_retf_i32_opsize_c32, },
5672 { false,888, bs3CpuBasic2_retf_i888_c32, },
5673 };
5674
5675 static struct
5676 {
5677 bool fInterPriv;
5678 int8_t iXcpt;
5679 RTSEL uStartSs;
5680 uint8_t cDstBits;
5681 RTSEL uDstCs;
5682         union /* must use a union here: a plain uint16_t won't compile and a uint32_t messes up the fixups. */
5683 {
5684 uint32_t offDst;
5685 struct
5686 {
5687 NPVOID pv;
5688 uint16_t uHigh;
5689 } s;
5690 };
5691 RTSEL uDstSs;
5692 uint16_t uErrCd;
5693 } const s_aSubTests[] =
5694 { /* PriChg, Xcpt, uStartSs, => bits uDstCs offDst/pv uDstSs uErrCd */
5695 { false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5696 { false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5697 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5698 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5699 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5700 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5701 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5702 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5703 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5704 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5705 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5706 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5707 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5708 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5709 /* same with 32-bit wide target addresses: */
5710 { false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
5711 { false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
5712 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
5713 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
5714 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
5715 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
5716 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
5717 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
5718 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
5719 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
5720 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
5721 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
5722 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
5723 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
5724 /* conforming stuff */
5725 { false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5726 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5727 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
5728 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5729 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5730 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R1_CS32_CNF },
5731 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R1_CS32_CNF },
5732 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5733 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5734 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5735 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS32_CNF },
5736 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS32_CNF },
5737 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS32_CNF },
5738 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS32_CNF },
5739 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5740 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5741 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS32_CNF },
5742 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS32_CNF },
5743 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS32_CNF },
5744 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS32_CNF },
5745 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, BS3_SEL_R3_CS32_CNF },
5746 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 2, BS3_SEL_R3_CS32_CNF },
5747 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5748 /* returning to 16-bit code: */
5749 { false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, 0 },
5750 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
5751 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
5752 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
5753 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
5754 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5755 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
5756 { false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS16 | 0, 0 },
5757 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
5758 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
5759 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
5760 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
5761 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
5762 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5763 /* returning to 16-bit conforming code: */
5764 { false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, 0 },
5765 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
5766 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
5767 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
5768 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
5769 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
5770 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
5771 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5772 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R1_CS16_CNF },
5773 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
5774 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
5775 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
5776 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
5777 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
5778 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
5779 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5780 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS16_CNF },
5781 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS16_CNF },
5782 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
5783 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
5784 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS16_CNF },
5785 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS16_CNF },
5786 { true, 42, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, BS3_SEL_R3_CS16_CNF },
5787 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5788 /* returning to 64-bit code or 16-bit when not in long mode: */
5789 { false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5790 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5791 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5792 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5793 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_DS64 | 1, BS3_SEL_R0_DS64 },
5794 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_DS64 | 1, 0 },
5795 { false, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5796 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5797 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5798 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5799 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5800 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5801 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5802 { true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R2_CS64 },
5803 { true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 3, BS3_SEL_R2_CS64 },
5804 { true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 3, BS3_SEL_R1_SS32 },
5805 { true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 2, BS3_SEL_R3_SS32 },
5806 /* returning to 64-bit code or 16-bit when not in long mode, conforming code variant: */
5807 { false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5808 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5809 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5810 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5811
5812 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R1_CS64_CNF },
5813 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5814 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 2, BS3_SEL_R1_SS16 },
5815 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 1, BS3_SEL_R2_SS16 },
5816 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R2_SS16 },
5817 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5818 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5819
5820 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS64_CNF },
5821 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS64_CNF },
5822 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5823 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5824
5825 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS64_CNF },
5826 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS64_CNF },
5827 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS64_CNF },
5828 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5829
5830 /* some additional #GP variations */ /** @todo test all possible exceptions! */
5831 { true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS16 },
5832 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_00 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_00 },
5833 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_01 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_01 },
5834 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_02 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_02 },
5835 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_03 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_03 },
5836 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_04 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_04 },
5837 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_05 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_05 },
5838 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_06 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_06 },
5839 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_07 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_07 },
5840 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_08 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_08 },
5841 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_09 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_09 },
5842 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0a | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0a },
5843 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0b | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0b },
5844 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0c | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0c },
5845 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0d | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0d },
5846 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0e | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0e },
5847 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0f | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0f },
5848 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_10 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_10 },
5849 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_11 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_11 },
5850 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_12 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_12 },
5851 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_13 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_13 },
5852 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_14 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_14 },
5853 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_15 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_15 },
5854 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_16 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_16 },
5855 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_17 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_17 },
5856 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_18 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_18 },
5857 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_19 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_19 },
5858 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_1a | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1a },
5859 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_1b | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1b },
5860 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_1c | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1c },
5861 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_1d | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1d },
5862 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_1e | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1e },
5863 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_1f | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1f },
5864 };
5865
5866 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
5867 {
5868 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
5869 //Bs3TestPrintf("-------------- #%u: cs:eip=%04RX16:%08RX64 imm=%u%s\n",
5870 // iTest, Ctx.cs, Ctx.rip.u, s_aTests[iTest].cbImm, s_aTests[iTest].fOpSizePfx ? " o16" : "");
5871
5872 for (iSubTest = 0; iSubTest < RT_ELEMENTS(s_aSubTests); iSubTest++)
5873 {
5874 g_usBs3TestStep = (iTest << 12) | (iSubTest << 1);
5875 if (!s_aTests[iTest].fOpSizePfx || s_aSubTests[iSubTest].offDst <= UINT16_MAX)
5876 {
5877 uint16_t const cbFrmDisp = s_aSubTests[iSubTest].fInterPriv ? iSubTest % 7 : 0;
5878 uint16_t const cbStkItem = s_aTests[iTest].fOpSizePfx ? 2 : 4;
5879 uint16_t const cbFrame = (s_aSubTests[iSubTest].fInterPriv ? 4 : 2) * cbStkItem;
5880 RTSEL const uDstSs = s_aSubTests[iSubTest].uDstSs;
5881 uint64_t uDstRspExpect, uDstRspPush;
5882 //Bs3TestPrintf(" #%u: %s %d %#04RX16 -> %u %#04RX16:%#04RX32 %#04RX16 %#RX16\n", iSubTest, s_aSubTests[iSubTest].fInterPriv ? "priv" : "same", s_aSubTests[iSubTest].iXcpt, s_aSubTests[iSubTest].uStartSs,
5883 // s_aSubTests[iSubTest].cDstBits, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst, s_aSubTests[iSubTest].uDstSs, s_aSubTests[iSubTest].uErrCd);
5884
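                    /* Same stack math as in the 16-bit variant above, except that in 32-bit
                       code the operand size prefix selects 16-bit stack items, so the
                       fOpSizePfx checks are the other way around. */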
5885 Ctx.ss = s_aSubTests[iSubTest].uStartSs;
5886 if (Ctx.ss != BS3_SEL_R0_SS32)
5887 Ctx.rsp.u32 |= UINT32_C(0xfffe0000);
5888 else
5889 Ctx.rsp.u32 &= UINT16_MAX;
5890 uDstRspExpect = uDstRspPush = Ctx.rsp.u + s_aTests[iTest].cbImm + cbFrame + cbFrmDisp;
5891 if (s_aSubTests[iSubTest].fInterPriv)
5892 {
5893 if (!s_aTests[iTest].fOpSizePfx)
5894 uDstRspPush = (uDstRspPush & UINT16_MAX) | UINT32_C(0xacdc0000);
5895 if ( uDstSs == (BS3_SEL_R1_SS32 | 1)
5896 || uDstSs == (BS3_SEL_R2_SS32 | 2)
5897 || uDstSs == (BS3_SEL_R3_SS32 | 3)
5898 || (s_aSubTests[iSubTest].cDstBits == 64 && BS3_MODE_IS_64BIT_SYS(bMode)))
5899 {
5900 if (!s_aTests[iTest].fOpSizePfx)
5901 uDstRspExpect = uDstRspPush;
5902 else
5903 uDstRspExpect &= UINT16_MAX;
5904 }
5905 }
5906
5907 CtxExpected.bCpl = Ctx.bCpl;
5908 CtxExpected.cs = Ctx.cs;
5909 CtxExpected.ss = Ctx.ss;
5910 CtxExpected.ds = Ctx.ds;
5911 CtxExpected.es = Ctx.es;
5912 CtxExpected.fs = Ctx.fs;
5913 CtxExpected.gs = Ctx.gs;
5914 CtxExpected.rip.u = Ctx.rip.u;
5915 CtxExpected.rsp.u = Ctx.rsp.u;
5916 CtxExpected.rax.u = Ctx.rax.u;
5917 if (s_aSubTests[iSubTest].iXcpt < 0)
5918 {
5919 CtxExpected.cs = s_aSubTests[iSubTest].uDstCs;
5920 CtxExpected.rip.u = s_aSubTests[iSubTest].offDst;
5921 if (s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
5922 {
5923 CtxExpected.rip.u += 1;
5924 CtxExpected.rax.au8[0] = CtxExpected.rflags.u16 & X86_EFL_CF ? 0xff : 0;
5925 }
5926 CtxExpected.ss = uDstSs;
5927 CtxExpected.rsp.u = uDstRspExpect;
5928 if (s_aSubTests[iSubTest].fInterPriv)
5929 {
5930 uint16_t BS3_FAR *puSel = &CtxExpected.ds; /* ASSUME member order! */
5931 unsigned cSels = 4;
5932 CtxExpected.bCpl = CtxExpected.ss & X86_SEL_RPL;
5933 while (cSels-- > 0)
5934 {
5935 uint16_t uSel = *puSel;
5936 if ( (uSel & X86_SEL_MASK_OFF_RPL)
5937 && Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u2Dpl < CtxExpected.bCpl
5938 && (Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u4Type & (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
5939 != (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
5940 *puSel = 0;
5941 puSel++;
5942 }
5943 CtxExpected.rsp.u += s_aTests[iTest].cbImm; /* arguments are dropped from both stacks. */
5944 }
5945 }
5946 g_uBs3TrapEipHint = CtxExpected.rip.u32;
5947 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64 -> %04RX16:%04RX64 [pushed %#RX64]; %04RX16:%04RX64\n",Ctx.ss, Ctx.rsp.u,
5948 // CtxExpected.ss, CtxExpected.rsp.u, uDstRspPush, CtxExpected.cs, CtxExpected.rip.u);
5949 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5950 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5951 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5952 //Bs3TestPrintf("%p: %04RX16 %04RX16 %04RX16 %04RX16\n", StkPtr.pu16, StkPtr.pu16[0], StkPtr.pu16[1], StkPtr.pu16[2], StkPtr.pu16[3]);
5953 //Bs3TestPrintf("%.48Rhxd\n", StkPtr.pu16);
5954 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5955 if (s_aSubTests[iSubTest].iXcpt < 0)
5956 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5957 else
5958 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5959 g_usBs3TestStep++;
5960
5961 /* Again single stepping: */
5962 //Bs3TestPrintf("stepping...\n");
5963 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5964 Ctx.rflags.u16 |= X86_EFL_TF;
5965 CtxExpected.rflags.u16 = Ctx.rflags.u16;
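                    /* As in the 16-bit variant: when stepping, the #DB hits before the SALC
                       at the destination executes, so undo those adjustments. */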
5966 if (s_aSubTests[iSubTest].iXcpt < 0 && s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
5967 {
5968 CtxExpected.rip.u -= 1;
5969 CtxExpected.rax.u = Ctx.rax.u;
5970 }
5971 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5972 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5973 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5974 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5975 if (s_aSubTests[iSubTest].iXcpt < 0)
5976 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
5977 else
5978 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5979 Ctx.rflags.u16 &= ~X86_EFL_TF;
5980 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5981 g_usBs3TestStep++;
5982 }
5983 }
5984 }
5985 }
5986 /*
5987 * 64-bit tests.
5988 */
5989 else if (BS3_MODE_IS_64BIT_CODE(bMode))
5990 {
5991 static struct
5992 {
5993 uint8_t fOpSizePfx; /**< 0: none, 1: 066h, 2: REX.W; Effective op size prefix. */
5994 uint16_t cbImm;
5995 FPFNBS3FAR pfnTest;
5996 } const s_aTests[] =
5997 {
5998 { 0, 0, bs3CpuBasic2_retf_c64, },
5999 { 1, 0, bs3CpuBasic2_retf_opsize_c64, },
6000 { 0, 32, bs3CpuBasic2_retf_i32_c64, },
6001 { 1, 32, bs3CpuBasic2_retf_i32_opsize_c64, },
6002 { 2, 0, bs3CpuBasic2_retf_rexw_c64, },
6003 { 2, 0, bs3CpuBasic2_retf_opsize_rexw_c64, },
6004 { 1, 0, bs3CpuBasic2_retf_rexw_opsize_c64, },
6005 { 2, 24, bs3CpuBasic2_retf_i24_rexw_c64, },
6006 { 2, 24, bs3CpuBasic2_retf_i24_opsize_rexw_c64, },
6007 { 1, 24, bs3CpuBasic2_retf_i24_rexw_opsize_c64, },
6008 { 0,888, bs3CpuBasic2_retf_i888_c64, },
6009 };
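        /* Note: REX.W takes precedence over a 066h prefix that precedes it, while a REX
           prefix that is not immediately followed by the opcode is ignored; that is why
           both prefix orderings are exercised above and why their effective prefix values differ. */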
6010
6011 static struct
6012 {
6013 bool fInterPriv;
6014 int8_t iXcpt;
6015 RTSEL uStartSs;
6016 uint8_t cDstBits;
6017 RTSEL uDstCs;
6018         union /* must use a union here: a plain uint16_t won't compile and a uint32_t messes up the fixups. */
6019 {
6020 uint32_t offDst;
6021 struct
6022 {
6023 NPVOID pv;
6024 uint16_t uHigh;
6025 } s;
6026 };
6027 RTSEL uDstSs;
6028 uint16_t uErrCd;
6029 } const s_aSubTests[] =
6030 { /* PriChg, Xcpt, uStartSs, => bits uDstCs offDst/pv uDstSs uErrCd */
6031 { false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
6032 { false, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
6033 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
6034 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
6035 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
6036 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
6037 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6038 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6039 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
6040 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
6041 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6042 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6043 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
6044 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
6045 /* same with 32-bit wide target addresses: */
6046 { false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64 | 0, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
6047 { false, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64 | 0, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
6048 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
6049 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
6050 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
6051 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
6052 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
6053 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
6054 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
6055 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
6056 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
6057 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
6058 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
6059 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
6060 /* conforming stuff */
6061 { false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
6062 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
6063 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
6064 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6065 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6066 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R1_CS64_CNF },
6067 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R1_CS64_CNF },
6068 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
6069 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6070 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6071 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS64_CNF },
6072 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS64_CNF },
6073 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS64_CNF },
6074 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS64_CNF },
6075 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6076 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6077 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS64_CNF },
6078 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS64_CNF },
6079 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS64_CNF },
6080 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS64_CNF },
6081 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, BS3_SEL_R3_CS64_CNF },
6082 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 2, BS3_SEL_R3_CS64_CNF },
6083 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6084 /* returning to 16-bit code: */
6085 { false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, 0 },
6086 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
6087 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
6088 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
6089 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
6090 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
6091 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
6092 { false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS16 | 0, 0 },
6093 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
6094 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
6095 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
6096 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
6097 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
6098 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
6099 /* returning to 16-bit conforming code: */
6100 { false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, 0 },
6101 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
6102 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
6103 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
6104 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
6105 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
6106 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
6107 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
6108 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R1_CS16_CNF },
6109 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
6110 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
6111 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
6112 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
6113 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
6114 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
6115 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
6116 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS16_CNF },
6117 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS16_CNF },
6118 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
6119 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
6120 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS16_CNF },
6121 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS16_CNF },
6122 { true, 42, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS32 | 2, BS3_SEL_R3_CS16_CNF },
6123 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
6124 /* returning to 32-bit code - narrow 16-bit target address: */
6125 { false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
6126 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
6127 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
6128 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6129 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
6130 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6131 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
6132 { false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
6133 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
6134 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
6135 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
6136 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6137 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
6138 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6139 /* returning to 32-bit code - wider 32-bit target address: */
6140 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
6141 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
6142 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
6143 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
6144 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
6145 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
6146 { false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS16 | 0, 0 },
6147 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
6148 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
6149 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
6150 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
6151 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
6152 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
6153 /* returning to 32-bit conforming code: */
6154 { false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
6155 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
6156 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
6157 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
6158 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
6159 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
6160 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
6161 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 3, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6162 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R1_CS32_CNF },
6163 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
6164 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
6165 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
6166 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
6167 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
6168 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
6169 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
6170 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS32_CNF },
6171 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS32_CNF },
6172 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 2, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6173 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
6174 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS32_CNF },
6175 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS32_CNF },
6176 { true, 42, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, BS3_SEL_R3_CS32_CNF },
6177 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
6178
6179 /* some additional #GP variations */ /** @todo test all possible exceptions! */
6180 { true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS16 },
6181
6182 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_00 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_00 },
6183 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_02 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_02 },
6184 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_04 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_04 },
6185 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_06 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_06 },
6186 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_08 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_08 },
6187 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_0a | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0a },
6188 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_0c | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0c },
6189 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_0e | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0e },
6190 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_10 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_10 },
6191 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_12 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_12 },
6192 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_14 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_14 },
6193 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_16 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_16 },
6194 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_18 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_18 },
6195 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_1a | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1a },
6196 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_1c | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1c },
6197 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_1e | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1e },
6198 };
6199
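 /* Drive each retf encoding variant over every selector/privilege sub-test below;
    each combination is run once normally and once with single-stepping (TF) enabled. */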
6200 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
6201 {
6202 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
6203 //Bs3TestPrintf("-------------- #%u: cs:eip=%04RX16:%08RX64 imm=%u%s\n", iTest, Ctx.cs, Ctx.rip.u, s_aTests[iTest].cbImm,
6204 // s_aTests[iTest].fOpSizePfx == 1 ? " o16" : s_aTests[iTest].fOpSizePfx == 2 ? " o64" : "");
6205
6206 for (iSubTest = 0; iSubTest < RT_ELEMENTS(s_aSubTests); iSubTest++)
6207 {
6208 g_usBs3TestStep = (iTest << 12) | (iSubTest << 1);
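 /* The o16 form only pops a 16-bit offset, so skip sub-tests whose target offset doesn't fit in 16 bits. */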
6209 if (s_aTests[iTest].fOpSizePfx != 1 || s_aSubTests[iSubTest].offDst <= UINT16_MAX)
6210 {
6211 uint16_t const cbFrmDisp = s_aSubTests[iSubTest].fInterPriv ? iSubTest % 7 : 0;
6212 uint16_t const cbStkItem = s_aTests[iTest].fOpSizePfx == 2 ? 8 : s_aTests[iTest].fOpSizePfx == 0 ? 4 : 2;
6213 uint16_t const cbFrame = (s_aSubTests[iSubTest].fInterPriv ? 4 : 2) * cbStkItem;
6214 RTSEL const uDstSs = s_aSubTests[iSubTest].uDstSs;
6215 uint64_t uDstRspExpect, uDstRspPush;
6216 //Bs3TestPrintf(" #%u: %s %d %#04RX16 -> %u %#04RX16:%#04RX32 %#04RX16 %#RX16\n", iSubTest, s_aSubTests[iSubTest].fInterPriv ? "priv" : "same", s_aSubTests[iSubTest].iXcpt, s_aSubTests[iSubTest].uStartSs,
6217 // s_aSubTests[iSubTest].cDstBits, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst, s_aSubTests[iSubTest].uDstSs, s_aSubTests[iSubTest].uErrCd);
6218
6219 Ctx.ss = s_aSubTests[iSubTest].uStartSs;
6220 uDstRspExpect = uDstRspPush = Ctx.rsp.u + s_aTests[iTest].cbImm + cbFrame + cbFrmDisp;
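 /* For privilege-changing returns, stuff garbage into the upper bits of the RSP value we
    push so we can check how much of it is expected to be loaded: all 64 bits for a 64-bit
    target, the low 32 bits for a 32-bit SS, and only the low word for a 16-bit SS. */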
6221 if (s_aSubTests[iSubTest].fInterPriv)
6222 {
6223 if (s_aTests[iTest].fOpSizePfx != 1)
6224 {
6225 if (s_aTests[iTest].fOpSizePfx == 2)
6226 uDstRspPush |= UINT64_C(0xf00dfaceacdc0000);
6227 else
6228 uDstRspPush |= UINT32_C(0xacdc0000);
6229 if (s_aSubTests[iSubTest].cDstBits == 64)
6230 uDstRspExpect = uDstRspPush;
6231 else if (!BS3_SEL_IS_SS16(uDstSs))
6232 uDstRspExpect = (uint32_t)uDstRspPush;
6233 }
6234 }
6235
6236 CtxExpected.bCpl = Ctx.bCpl;
6237 CtxExpected.cs = Ctx.cs;
6238 CtxExpected.ss = Ctx.ss;
6239 CtxExpected.ds = Ctx.ds;
6240 CtxExpected.es = Ctx.es;
6241 CtxExpected.fs = Ctx.fs;
6242 CtxExpected.gs = Ctx.gs;
6243 CtxExpected.rip.u = Ctx.rip.u;
6244 CtxExpected.rsp.u = Ctx.rsp.u;
6245 CtxExpected.rax.u = Ctx.rax.u;
6246 if (s_aSubTests[iSubTest].iXcpt < 0)
6247 {
6248 CtxExpected.cs = s_aSubTests[iSubTest].uDstCs;
6249 CtxExpected.rip.u = s_aSubTests[iSubTest].offDst;
6250 if (s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
6251 {
6252 CtxExpected.rip.u += 1;
6253 CtxExpected.rax.au8[0] = CtxExpected.rflags.u16 & X86_EFL_CF ? 0xff : 0;
6254 }
6255 CtxExpected.ss = uDstSs;
6256 CtxExpected.rsp.u = uDstRspExpect;
6257 if (s_aSubTests[iSubTest].fInterPriv)
6258 {
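 /* When returning to an outer privilege level, DS/ES/FS/GS must be cleared if they hold
    selectors more privileged than the new CPL that don't refer to conforming code segments. */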
6259 uint16_t BS3_FAR *puSel = &CtxExpected.ds; /* ASSUME member order! */
6260 unsigned cSels = 4;
6261 CtxExpected.bCpl = CtxExpected.ss & X86_SEL_RPL;
6262 while (cSels-- > 0)
6263 {
6264 uint16_t uSel = *puSel;
6265 if ( (uSel & X86_SEL_MASK_OFF_RPL)
6266 && Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u2Dpl < CtxExpected.bCpl
6267 && (Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u4Type & (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
6268 != (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
6269 *puSel = 0;
6270 puSel++;
6271 }
6272 CtxExpected.rsp.u += s_aTests[iTest].cbImm; /* arguments are dropped from both stacks. */
6273 }
6274 }
6275 g_uBs3TrapEipHint = CtxExpected.rip.u32;
6276 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64 -> %04RX16:%04RX64 [pushed %#RX64]; %04RX16:%04RX64\n",Ctx.ss, Ctx.rsp.u,
6277 // CtxExpected.ss, CtxExpected.rsp.u, uDstRspPush, CtxExpected.cs, CtxExpected.rip.u);
6278 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
6279 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
6280 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
6281 //Bs3TestPrintf("%p: %04RX16 %04RX16 %04RX16 %04RX16\n", StkPtr.pu16, StkPtr.pu16[0], StkPtr.pu16[1], StkPtr.pu16[2], StkPtr.pu16[3]);
6282 //Bs3TestPrintf("%.48Rhxd\n", StkPtr.pu16);
6283 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
6284 if (s_aSubTests[iSubTest].iXcpt < 0)
6285 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
6286 else
6287 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
6288 g_usBs3TestStep++;
6289
6290 /* Again single stepping: */
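 /* With TF set, a successful far return is expected to raise a single-step #DB (DR6.BS set)
    at the destination before the destination instruction executes. */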
6291 //Bs3TestPrintf("stepping...\n");
6292 Bs3RegSetDr6(X86_DR6_INIT_VAL);
6293 Ctx.rflags.u16 |= X86_EFL_TF;
6294 CtxExpected.rflags.u16 = Ctx.rflags.u16;
6295 if (s_aSubTests[iSubTest].iXcpt < 0 && s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
6296 {
6297 CtxExpected.rip.u -= 1;
6298 CtxExpected.rax.u = Ctx.rax.u;
6299 }
6300 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
6301 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
6302 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
6303 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
6304 if (s_aSubTests[iSubTest].iXcpt < 0)
6305 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
6306 else
6307 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
6308 Ctx.rflags.u16 &= ~X86_EFL_TF;
6309 CtxExpected.rflags.u16 = Ctx.rflags.u16;
6310 g_usBs3TestStep++;
6311 }
6312 }
6313 }
6314 }
6315 else
6316 Bs3TestFailed("wtf?");
6317
6318 if (BS3_MODE_IS_64BIT_SYS(bMode))
6319 Bs3TrapReInit();
6320 return 0;
6321}
6322
6323
6324
6325/*********************************************************************************************************************************
6326* Instruction Length *
6327*********************************************************************************************************************************/
6328
6329
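/**
 * Worker for the instruction length tests.
 *
 * @returns 0.
 * @param bMode The CPU mode we're testing.
 * @param pbCodeBuf Three consecutive code pages; when paging is enabled the
 * caller has made the last page not-present.
 */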
6330static uint8_t bs3CpuBasic2_instr_len_Worker(uint8_t bMode, uint8_t BS3_FAR *pbCodeBuf)
6331{
6332 BS3TRAPFRAME TrapCtx;
6333 BS3REGCTX Ctx;
6334 BS3REGCTX CtxExpected;
6335 uint32_t uEipBase;
6336 unsigned cbInstr;
6337 unsigned off;
6338
6339 /* Make sure they're allocated and all zeroed. */
6340 Bs3MemZero(&Ctx, sizeof(Ctx));
6341 Bs3MemZero(&CtxExpected, sizeof(CtxExpected));
6342 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
6343
6344 /*
6345 * Create a context.
6346 *
6347 * ASSUMES we're on the ring-0 stack in ring-0 and using less than 16KB.
6348 */
6349 Bs3RegCtxSaveEx(&Ctx, bMode, 768);
6350 Bs3RegCtxSetRipCsFromCurPtr(&Ctx, (FPFNBS3FAR)pbCodeBuf);
6351 uEipBase = Ctx.rip.u32;
6352
6353 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
6354
6355 /*
6356 * Simple stuff crossing the page.
6357 */
6358 for (off = X86_PAGE_SIZE - 32; off <= X86_PAGE_SIZE + 16; off++)
6359 {
6360 Ctx.rip.u32 = uEipBase + off;
6361 for (cbInstr = 0; cbInstr < 24; cbInstr++)
6362 {
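 /* Encodings of at most 15 bytes (cbInstr < 16) should execute the prefixed NOP and trap
    on the following UD2; anything longer must raise #GP(0) with RIP unchanged. */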
6363 /*
6364 * Generate the instructions:
6365 * [es] nop
6366 * ud2
6367 */
6368 if (cbInstr > 0)
6369 {
6370 Bs3MemSet(&pbCodeBuf[off], 0x26 /* es */, cbInstr);
6371 pbCodeBuf[off + cbInstr - 1] = 0x90; /* nop */
6372 }
6373 pbCodeBuf[off + cbInstr + 0] = 0x0f; /* ud2 */
6374 pbCodeBuf[off + cbInstr + 1] = 0x0b;
6375
6376 /*
6377 * Test it.
6378 */
6379 if (cbInstr < 16)
6380 CtxExpected.rip.u32 = Ctx.rip.u32 + cbInstr;
6381 else
6382 CtxExpected.rip.u32 = Ctx.rip.u32;
6383 g_uBs3TrapEipHint = CtxExpected.rip.u32;
6384 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
6385 if (cbInstr < 16)
6386 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
6387 else
6388 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
6389 }
6390 pbCodeBuf[off] = 0xf1; /* icebp */
6391 }
6392
6393 /*
6394 * Pit instruction length violations against the segment limit (#GP).
6395 */
6396 if (!BS3_MODE_IS_RM_OR_V86(bMode) && bMode != BS3_MODE_LM64)
6397 {
6398 /** @todo */
6399 }
6400
6401 /*
6402 * Pit instruction length violations against an invalid page (#PF).
6403 */
6404 if (BS3_MODE_IS_PAGED(bMode))
6405 {
6406 /** @todo */
6407 }
6408
6409 return 0;
6410}
6411
6412
6413/**
6414 * Entrypoint for the instruction length tests.
6415 *
6416 * @returns 0 or BS3TESTDOMODE_SKIPPED.
6417 * @param bMode The CPU mode we're testing.
6418 */
6419BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_instr_len)(uint8_t bMode)
6420{
6421 /*
6422 * Allocate three pages so we can straddle an instruction across the
6423 * boundary for testing special IEM cases, with the last page being
6424 * made inaccessible, which is useful for pitting #PF against #GP.
6425 */
6426 uint8_t BS3_FAR * const pbCodeBuf = (uint8_t BS3_FAR *)Bs3MemAlloc(BS3MEMKIND_REAL, X86_PAGE_SIZE * 3);
6427 //Bs3TestPrintf("pbCodeBuf=%p\n", pbCodeBuf);
6428 if (pbCodeBuf)
6429 {
6430 Bs3MemSet(pbCodeBuf, 0xf1, X86_PAGE_SIZE * 3);
6431 bs3CpuBasic2_SetGlobals(bMode);
6432
6433 if (!BS3_MODE_IS_PAGED(bMode))
6434 bs3CpuBasic2_instr_len_Worker(bMode, pbCodeBuf);
6435 else
6436 {
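 /* Make the last of the three pages not-present so instructions crossing into it can raise #PF. */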
6437 uint32_t const uFlatLastPg = Bs3SelPtrToFlat(pbCodeBuf) + X86_PAGE_SIZE * 2;
6438 int rc = Bs3PagingProtect(uFlatLastPg, X86_PAGE_SIZE, 0, X86_PTE_P);
6439 if (RT_SUCCESS(rc))
6440 {
6441 bs3CpuBasic2_instr_len_Worker(bMode, pbCodeBuf);
6442 Bs3PagingProtect(uFlatLastPg, X86_PAGE_SIZE, X86_PTE_P, 0);
6443 }
6444 else
6445 Bs3TestFailed("Failed to allocate 3 code pages");
6446 }
6447
6448 Bs3MemFree(pbCodeBuf, X86_PAGE_SIZE * 3);
6449 }
6450 else
6451 Bs3TestFailed("Failed to allocate 3 code pages");
6452 return 0;
6453}
6454